Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.
Class S3Helper, method getFile.
private DataStorageFile getFile(AmazonS3 client, String bucket, String path) {
    // format last-modified timestamps in UTC
    TimeZone tz = TimeZone.getTimeZone("UTC");
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    df.setTimeZone(tz);
    // list objects under the exact path, collapsing nested children by the delimiter
    ListObjectsRequest req = new ListObjectsRequest();
    req.setBucketName(bucket);
    req.setPrefix(path);
    req.setDelimiter(S3Constants.DELIMITER);
    ObjectListing listing = client.listObjects(req);
    for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
        String relativePath = s3ObjectSummary.getKey();
        // the key matches the requested path: build the file descriptor for it
        if (relativePath.equalsIgnoreCase(path)) {
            String fileName = relativePath.substring(path.length());
            DataStorageFile file = new DataStorageFile();
            file.setName(fileName);
            file.setPath(relativePath);
            file.setSize(s3ObjectSummary.getSize());
            file.setChanged(df.format(s3ObjectSummary.getLastModified()));
            Map<String, String> labels = new HashMap<>();
            if (s3ObjectSummary.getStorageClass() != null) {
                labels.put("StorageClass", s3ObjectSummary.getStorageClass());
            }
            file.setLabels(labels);
            return file;
        }
    }
    return null;
}
Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.
Class S3Helper, method convertToStorageFile.
private DataStorageFile convertToStorageFile(S3ObjectSummary s3ObjectSummary) {
    String relativePath = s3ObjectSummary.getKey();
    // skip the hidden marker object that represents an empty folder
    if (StringUtils.endsWithIgnoreCase(relativePath, S3Helper.FOLDER_TOKEN_FILE.toLowerCase())) {
        return null;
    }
    // drop a trailing delimiter so folder-like keys map to a clean path
    if (relativePath.endsWith(S3Helper.DELIMITER)) {
        relativePath = relativePath.substring(0, relativePath.length() - 1);
    }
    DataStorageFile file = new DataStorageFile();
    file.setName(relativePath);
    file.setPath(relativePath);
    file.setSize(s3ObjectSummary.getSize());
    file.setVersion(null);
    file.setChanged(S3Helper.AWS_DATE_FORMAT.format(s3ObjectSummary.getLastModified()));
    file.setDeleteMarker(null);
    Map<String, String> labels = new HashMap<>();
    if (s3ObjectSummary.getStorageClass() != null) {
        labels.put("StorageClass", s3ObjectSummary.getStorageClass());
    }
    file.setLabels(labels);
    return file;
}
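A short illustrative sketch (all values are made up) of how this conversion maps an S3ObjectSummary to a DataStorageFile:

import com.amazonaws.services.s3.model.S3ObjectSummary;
import java.util.Date;

public class ConvertSketch {
    public static void main(String[] args) {
        // hypothetical summary as it would come back from a bucket listing
        S3ObjectSummary summary = new S3ObjectSummary();
        summary.setKey("results/run-1/output.log");
        summary.setSize(2048L);
        summary.setLastModified(new Date());
        summary.setStorageClass("STANDARD");
        // convertToStorageFile(summary) would yield: name/path = "results/run-1/output.log",
        // size = 2048, labels = {StorageClass=STANDARD}, version and delete marker null;
        // a key ending with the folder token file would be skipped (null result).
    }
}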
Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.
Class S3Helper, method listFiles.
private void listFiles(AmazonS3 client, String bucket) {
    ListObjectsV2Request req = new ListObjectsV2Request();
    req.setBucketName(bucket);
    req.setPrefix("");
    ListObjectsV2Result listing;
    try (IndexRequestContainer walker = new IndexRequestContainer(bulkRequestCreator, bulkInsertSize)) {
        do {
            listing = client.listObjectsV2(req);
            for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
                DataStorageFile file = convertToStorageFile(s3ObjectSummary);
                if (file != null) {
                    if (enableTags) {
                        file.setTags(listObjectTags(client, bucket, s3ObjectSummary.getKey()));
                    }
                    walker.add(createIndexRequest(file));
                }
            }
            req.setContinuationToken(listing.getNextContinuationToken());
        } while (listing.isTruncated());
    }
}
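The indexing plumbing (IndexRequestContainer, createIndexRequest) is project-specific. Stripped down to plain AWS SDK calls, the continuation-token pagination in this loop looks roughly like the following sketch (bucket name assumed, summaries simply printed):

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ListObjectsV2Request;
import com.amazonaws.services.s3.model.ListObjectsV2Result;

public class PaginationSketch {
    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
        ListObjectsV2Request request = new ListObjectsV2Request()
                .withBucketName("example-bucket")   // hypothetical bucket
                .withPrefix("");
        ListObjectsV2Result result;
        do {
            result = s3.listObjectsV2(request);
            result.getObjectSummaries().forEach(summary -> System.out.println(summary.getKey()));
            // carry the continuation token into the next request until the listing is complete
            request.setContinuationToken(result.getNextContinuationToken());
        } while (result.isTruncated());
    }
}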
Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.
Class DataStorageController, method uploadStream.
@RequestMapping(value = "/datastorage/{id}/upload/stream", method = RequestMethod.POST)
@ResponseBody
public Result<List<DataStorageFile>> uploadStream(HttpServletRequest request,
                                                  @PathVariable Long id,
                                                  @RequestParam(value = PATH, required = false) String folder)
        throws IOException, FileUploadException {
    Assert.isTrue(ServletFileUpload.isMultipartContent(request), "Not a multipart request");
    ServletFileUpload upload = new ServletFileUpload();
    FileItemIterator iterator = upload.getItemIterator(request);
    Assert.isTrue(iterator.hasNext(), NO_FILES_SPECIFIED);
    boolean found = false;
    List<DataStorageFile> uploadedFiles = new ArrayList<>();
    while (iterator.hasNext()) {
        FileItemStream stream = iterator.next();
        // only file parts are stored; plain form fields are ignored
        if (!stream.isFormField()) {
            found = true;
            try (InputStream dataStream = stream.openStream()) {
                // TODO: try with Buffered streams
                uploadedFiles.add(dataStorageApiService.createDataStorageFile(id, folder, stream.getName(), dataStream));
            }
        }
    }
    Assert.isTrue(found, NO_FILES_SPECIFIED);
    return Result.success(uploadedFiles);
}
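A hedged client-side sketch (base URL, storage id, query parameter name, and file are all assumptions, not taken from the project) of a multipart request this endpoint would accept, using Apache HttpClient:

import java.io.File;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class UploadStreamSketch {
    public static void main(String[] args) throws Exception {
        try (CloseableHttpClient http = HttpClients.createDefault()) {
            // hypothetical deployment URL, storage id 42, target folder "results"
            HttpPost post = new HttpPost("https://cloud-pipeline.example/restapi/datastorage/42/upload/stream?path=results");
            post.setEntity(MultipartEntityBuilder.create()
                    // a file part (not a form field); its original file name is what
                    // stream.getName() returns on the server side
                    .addBinaryBody("file", new File("output.log"))
                    .build());
            try (CloseableHttpResponse response = http.execute(post)) {
                System.out.println(response.getStatusLine());
            }
        }
    }
}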
Use of com.epam.pipeline.entity.datastorage.DataStorageFile in project cloud-pipeline by epam.
Class DataStorageManager, method getDataStorageItemWithTags.
public AbstractDataStorageItem getDataStorageItemWithTags(final Long dataStorageId, final String path,
                                                          Boolean showVersion) {
    List<AbstractDataStorageItem> dataStorageItems =
            getDataStorageItems(dataStorageId, path, showVersion, null, null).getResults();
    if (CollectionUtils.isEmpty(dataStorageItems)) {
        return null;
    }
    DataStorageFile dataStorageFile = (DataStorageFile) dataStorageItems.get(0);
    if (MapUtils.isEmpty(dataStorageFile.getVersions())) {
        // no version map: load tags for the object itself
        dataStorageFile.setTags(loadDataStorageObjectTags(dataStorageId, path, null));
    } else {
        // versioned storage: attach tags to every listed version
        dataStorageFile.getVersions().forEach((version, item) ->
                item.setTags(loadDataStorageObjectTags(dataStorageId, path, version)));
    }
    return dataStorageFile;
}
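A brief usage sketch (storage id and path are assumptions, and DataStorageFile is assumed to expose getTags()/getVersions() accessors matching the setters used above):

// inside a component with a DataStorageManager injected as dataStorageManager
AbstractDataStorageItem item = dataStorageManager.getDataStorageItemWithTags(42L, "results/output.log", false);
if (item instanceof DataStorageFile) {
    DataStorageFile file = (DataStorageFile) item;
    // non-versioned storage: tags of the object itself; versioned storage:
    // each entry of getVersions() carries its own tags instead
    System.out.println(file.getTags());
}

Note that the method casts the first returned item to DataStorageFile, so callers are expected to pass a path that resolves to a file rather than a folder.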