
Example 6 with DataStorageFile

Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.

The class S3Helper, method getFile:

private DataStorageFile getFile(AmazonS3 client, String bucket, String path) {
    TimeZone tz = TimeZone.getTimeZone("UTC");
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    df.setTimeZone(tz);
    ListObjectsRequest req = new ListObjectsRequest();
    req.setBucketName(bucket);
    req.setPrefix(path);
    req.setDelimiter(S3Constants.DELIMITER);
    ObjectListing listing = client.listObjects(req);
    for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
        String relativePath = s3ObjectSummary.getKey();
        if (relativePath.equalsIgnoreCase(path)) {
            String fileName = relativePath.substring(path.length());
            DataStorageFile file = new DataStorageFile();
            file.setName(fileName);
            file.setPath(relativePath);
            file.setSize(s3ObjectSummary.getSize());
            file.setChanged(df.format(s3ObjectSummary.getLastModified()));
            Map<String, String> labels = new HashMap<>();
            if (s3ObjectSummary.getStorageClass() != null) {
                labels.put("StorageClass", s3ObjectSummary.getStorageClass());
            }
            file.setLabels(labels);
            return file;
        }
    }
    return null;
}
Also used: ListObjectsRequest(com.amazonaws.services.s3.model.ListObjectsRequest), TimeZone(java.util.TimeZone), DataStorageFile(com.epam.pipeline.entity.datastorage.DataStorageFile), HashMap(java.util.HashMap), LinkedHashMap(java.util.LinkedHashMap), DateFormat(java.text.DateFormat), SimpleDateFormat(java.text.SimpleDateFormat), ObjectListing(com.amazonaws.services.s3.model.ObjectListing), S3ObjectSummary(com.amazonaws.services.s3.model.S3ObjectSummary)
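
The lookup above matches only an exact key returned by a delimited prefix listing. A minimal, standalone sketch of the same pattern using just the AWS SDK is shown below; the bucket and key are illustrative values, and in addition to the imports listed above it needs com.amazonaws.services.s3.AmazonS3 and com.amazonaws.services.s3.AmazonS3ClientBuilder.

// Sketch only: exact-key lookup via a delimited prefix listing, mirroring getFile above.
AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
ListObjectsRequest request = new ListObjectsRequest()
        .withBucketName("example-bucket")
        .withPrefix("data/sample.txt")
        .withDelimiter("/");
ObjectListing listing = s3.listObjects(request);
listing.getObjectSummaries().stream()
        .filter(summary -> summary.getKey().equalsIgnoreCase("data/sample.txt"))
        .findFirst()
        .ifPresent(summary -> System.out.printf("size=%d, lastModified=%s%n",
                summary.getSize(), summary.getLastModified()));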

Example 7 with DataStorageFile

Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.

The class S3Helper, method convertToStorageFile:

private DataStorageFile convertToStorageFile(S3ObjectSummary s3ObjectSummary) {
    String relativePath = s3ObjectSummary.getKey();
    if (StringUtils.endsWithIgnoreCase(relativePath, S3Helper.FOLDER_TOKEN_FILE.toLowerCase())) {
        return null;
    }
    if (relativePath.endsWith(S3Helper.DELIMITER)) {
        relativePath = relativePath.substring(0, relativePath.length() - 1);
    }
    DataStorageFile file = new DataStorageFile();
    file.setName(relativePath);
    file.setPath(relativePath);
    file.setSize(s3ObjectSummary.getSize());
    file.setVersion(null);
    file.setChanged(S3Helper.AWS_DATE_FORMAT.format(s3ObjectSummary.getLastModified()));
    file.setDeleteMarker(null);
    Map<String, String> labels = new HashMap<>();
    if (s3ObjectSummary.getStorageClass() != null) {
        labels.put("StorageClass", s3ObjectSummary.getStorageClass());
    }
    file.setLabels(labels);
    return file;
}
Also used: DataStorageFile(com.epam.pipeline.entity.datastorage.DataStorageFile), HashMap(java.util.HashMap)
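
Since S3ObjectSummary exposes plain setters, the conversion above is easy to exercise in isolation. A hedged unit-test-style sketch follows; the values are illustrative, and it assumes the method is reachable from the test (for example through package-private visibility), plus java.util.Date and the usual JUnit assertEquals import.

// Sketch of exercising convertToStorageFile with a hand-built summary (illustrative values).
S3ObjectSummary summary = new S3ObjectSummary();
summary.setKey("results/run-1/output.txt");
summary.setSize(1024L);
summary.setLastModified(new Date());
summary.setStorageClass("STANDARD");
DataStorageFile converted = convertToStorageFile(summary);
assertEquals("results/run-1/output.txt", converted.getPath());
assertEquals("STANDARD", converted.getLabels().get("StorageClass"));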

Example 8 with DataStorageFile

Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.

The class S3Helper, method listFiles:

private void listFiles(AmazonS3 client, String bucket) {
    ListObjectsV2Request req = new ListObjectsV2Request();
    req.setBucketName(bucket);
    req.setPrefix("");
    ListObjectsV2Result listing;
    try (IndexRequestContainer walker = new IndexRequestContainer(bulkRequestCreator, bulkInsertSize)) {
        do {
            listing = client.listObjectsV2(req);
            for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
                DataStorageFile file = convertToStorageFile(s3ObjectSummary);
                if (file != null) {
                    if (enableTags) {
                        file.setTags(listObjectTags(client, bucket, s3ObjectSummary.getKey()));
                    }
                    walker.add(createIndexRequest(file));
                }
            }
            req.setContinuationToken(listing.getNextContinuationToken());
        } while (listing.isTruncated());
    }
}
Also used: DataStorageFile(com.epam.pipeline.entity.datastorage.DataStorageFile), ListObjectsV2Request(com.amazonaws.services.s3.model.ListObjectsV2Request), ListObjectsV2Result(com.amazonaws.services.s3.model.ListObjectsV2Result), IndexRequestContainer(com.epam.pipeline.elasticsearchagent.service.impl.IndexRequestContainer), S3ObjectSummary(com.amazonaws.services.s3.model.S3ObjectSummary)
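
The core of the method is ListObjectsV2 pagination: each page is fetched with the previous page's continuation token until isTruncated() returns false. A stripped-down sketch of the same traversal without the indexing machinery is shown below; the bucket name is illustrative.

// Sketch: paginated V2 listing, mirroring the loop in listFiles above.
AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
ListObjectsV2Request request = new ListObjectsV2Request()
        .withBucketName("example-bucket")
        .withPrefix("");
ListObjectsV2Result page;
do {
    page = s3.listObjectsV2(request);
    for (S3ObjectSummary summary : page.getObjectSummaries()) {
        System.out.println(summary.getKey() + " " + summary.getSize());
    }
    // Resume the next request after the last key of this page.
    request.setContinuationToken(page.getNextContinuationToken());
} while (page.isTruncated());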

Example 9 with DataStorageFile

Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.

The class DataStorageController, method uploadStream:

@RequestMapping(value = "/datastorage/{id}/upload/stream", method = RequestMethod.POST)
@ResponseBody
public Result<List<DataStorageFile>> uploadStream(HttpServletRequest request, @PathVariable Long id, @RequestParam(value = PATH, required = false) String folder) throws IOException, FileUploadException {
    Assert.isTrue(ServletFileUpload.isMultipartContent(request), "Not a multipart request");
    ServletFileUpload upload = new ServletFileUpload();
    FileItemIterator iterator = upload.getItemIterator(request);
    Assert.isTrue(iterator.hasNext(), NO_FILES_SPECIFIED);
    boolean found = false;
    List<DataStorageFile> uploadedFiles = new ArrayList<>();
    while (iterator.hasNext()) {
        FileItemStream stream = iterator.next();
        if (!stream.isFormField()) {
            found = true;
            try (InputStream dataStream = stream.openStream()) {
                // TODO: try with Buffered streams
                uploadedFiles.add(dataStorageApiService.createDataStorageFile(id, folder, stream.getName(), dataStream));
            }
        }
    }
    Assert.isTrue(found, NO_FILES_SPECIFIED);
    return Result.success(uploadedFiles);
}
Also used: DataStorageFile(com.epam.pipeline.entity.datastorage.DataStorageFile), ServletFileUpload(org.apache.commons.fileupload.servlet.ServletFileUpload), FileItemStream(org.apache.commons.fileupload.FileItemStream), InputStream(java.io.InputStream), ArrayList(java.util.ArrayList), FileItemIterator(org.apache.commons.fileupload.FileItemIterator), RequestMapping(org.springframework.web.bind.annotation.RequestMapping), ResponseBody(org.springframework.web.bind.annotation.ResponseBody)
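
The TODO inside the upload loop hints at buffering the multipart stream before it is handed to the storage service. A minimal sketch of that change, leaving the rest of the loop body as it is, could look like this; the 64 KB buffer size is an illustrative choice, not a project constant, and java.io.BufferedInputStream would need to be imported.

// Sketch of the buffered variant the TODO suggests; the buffer size is illustrative.
try (InputStream dataStream = new BufferedInputStream(stream.openStream(), 64 * 1024)) {
    uploadedFiles.add(dataStorageApiService.createDataStorageFile(id, folder, stream.getName(), dataStream));
}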

Example 10 with DataStorageFile

Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.

The class DataStorageManager, method getDataStorageItemWithTags:

public AbstractDataStorageItem getDataStorageItemWithTags(final Long dataStorageId, final String path, Boolean showVersion) {
    List<AbstractDataStorageItem> dataStorageItems = getDataStorageItems(dataStorageId, path, showVersion, null, null).getResults();
    if (CollectionUtils.isEmpty(dataStorageItems)) {
        return null;
    }
    DataStorageFile dataStorageFile = (DataStorageFile) dataStorageItems.get(0);
    if (MapUtils.isEmpty(dataStorageFile.getVersions())) {
        dataStorageFile.setTags(loadDataStorageObjectTags(dataStorageId, path, null));
    } else {
        dataStorageFile.getVersions().forEach((version, item) -> item.setTags(loadDataStorageObjectTags(dataStorageId, path, version)));
    }
    return dataStorageFile;
}
Also used: DataStorageFile(com.epam.pipeline.entity.datastorage.DataStorageFile), AbstractDataStorageItem(com.epam.pipeline.entity.datastorage.AbstractDataStorageItem)
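
A hedged usage sketch: the caller receives either a file whose own tags are populated, or a file whose per-version items carry their tags. The storage id and path below are illustrative, dataStorageManager is assumed to be injected, and the version items are assumed to expose getTags() alongside the setTags() call seen above.

// Sketch only: identifiers are illustrative.
AbstractDataStorageItem item = dataStorageManager.getDataStorageItemWithTags(42L, "data/sample.txt", true);
if (item instanceof DataStorageFile) {
    DataStorageFile file = (DataStorageFile) item;
    if (MapUtils.isEmpty(file.getVersions())) {
        System.out.println("tags: " + file.getTags());
    } else {
        file.getVersions().forEach((version, versionItem) ->
                System.out.println(version + " -> " + versionItem.getTags()));
    }
}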

Aggregations

DataStorageFile (com.epam.pipeline.entity.datastorage.DataStorageFile) 15
AbstractDataStorageItem (com.epam.pipeline.entity.datastorage.AbstractDataStorageItem) 5
DataStorageListing (com.epam.pipeline.entity.datastorage.DataStorageListing) 4
NFSDataStorage (com.epam.pipeline.entity.datastorage.nfs.NFSDataStorage) 4
File (java.io.File) 4
HashMap (java.util.HashMap) 4
Test (org.junit.Test) 4
S3ObjectSummary (com.amazonaws.services.s3.model.S3ObjectSummary) 3
AbstractSpringTest (com.epam.pipeline.AbstractSpringTest) 3
DataStorageFolder (com.epam.pipeline.entity.datastorage.DataStorageFolder) 3
InputStream (java.io.InputStream) 3
Matchers.anyString (org.mockito.Matchers.anyString) 3
ListObjectsV2Request (com.amazonaws.services.s3.model.ListObjectsV2Request) 2
ListObjectsV2Result (com.amazonaws.services.s3.model.ListObjectsV2Result) 2
MessageHelper (com.epam.pipeline.common.MessageHelper) 2
DataStorageDao (com.epam.pipeline.dao.datastorage.DataStorageDao) 2
AbstractDataStorage (com.epam.pipeline.entity.datastorage.AbstractDataStorage) 2
DataStorageException (com.epam.pipeline.entity.datastorage.DataStorageException) 2
DataStorageStreamingContent (com.epam.pipeline.entity.datastorage.DataStorageStreamingContent) 2
CmdExecutor (com.epam.pipeline.manager.CmdExecutor) 2