use of com.epam.pipeline.entity.datastorage.DataStorageException in project cloud-pipeline by epam.
the class NFSStorageProvider method createFolder.
    @Override
    public DataStorageFolder createFolder(NFSDataStorage dataStorage, String path)
            throws DataStorageException {
        File dataStorageDir = mount(dataStorage);
        File folder = new File(dataStorageDir, path);
        if (!folder.mkdirs()) {
            throw new DataStorageException(messageHelper.getMessage(
                    MessageConstants.ERROR_DATASTORAGE_NFS_CREATE_FOLDER, dataStorage.getPath()));
        }
        try {
            setUmask(folder);
        } catch (IOException e) {
            throw new DataStorageException(messageHelper.getMessage(
                    MessageConstants.ERROR_DATASTORAGE_CANNOT_CREATE_FILE, folder.getPath()), e);
        }
        return new DataStorageFolder(path, folder);
    }
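Note that File.mkdirs() returns false both when creation fails and when the target directory already exists, so createFolder raises ERROR_DATASTORAGE_NFS_CREATE_FOLDER for pre-existing paths as well. A minimal standalone sketch of that behaviour (illustrative only, not part of the provider; the class name is made up):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    // Standalone sketch: File.mkdirs() returns false on failure and also when
    // the directory already exists, so createFolder above treats both as errors.
    public class MkdirsDemo {
        public static void main(String[] args) throws IOException {
            File root = Files.createTempDirectory("nfs-demo").toFile();
            File folder = new File(root, "a/b/c");
            System.out.println(folder.mkdirs()); // true: created together with parents
            System.out.println(folder.mkdirs()); // false: already exists
        }
    }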
use of com.epam.pipeline.entity.datastorage.DataStorageException in project cloud-pipeline by epam.
the class NFSStorageProvider method deleteFile.
    @Override
    public void deleteFile(NFSDataStorage dataStorage, String path, String version, Boolean totally)
            throws DataStorageException {
        File dataStorageDir = mount(dataStorage);
        File file = new File(dataStorageDir, path);
        try {
            Files.delete(file.toPath());
        } catch (IOException e) {
            throw new DataStorageException(e);
        }
    }
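Files.delete reports problems through IOException subclasses (for example NoSuchFileException for a missing path, or DirectoryNotEmptyException for a non-empty directory), each of which deleteFile wraps into a DataStorageException. A small illustrative sketch, independent of the provider:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.NoSuchFileException;
    import java.nio.file.Path;

    // Illustrative only: shows the kind of IOException Files.delete can raise,
    // which deleteFile above converts into a DataStorageException.
    public class DeleteDemo {
        public static void main(String[] args) throws IOException {
            Path file = Files.createTempFile("nfs-demo", ".txt");
            Files.delete(file);          // succeeds
            try {
                Files.delete(file);      // second delete: the path no longer exists
            } catch (NoSuchFileException e) {
                System.out.println("already gone: " + e.getFile());
            }
        }
    }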
use of com.epam.pipeline.entity.datastorage.DataStorageException in project cloud-pipeline by epam.
the class NFSStorageProvider method deleteFolder.
    @Override
    public void deleteFolder(NFSDataStorage dataStorage, String path, Boolean totally)
            throws DataStorageException {
        File dataStorageDir = mount(dataStorage);
        File folder = new File(dataStorageDir, path);
        try {
            FileUtils.deleteDirectory(folder);
        } catch (IOException e) {
            throw new DataStorageException(e);
        }
    }
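Unlike Files.delete, FileUtils.deleteDirectory from Apache Commons IO (already used by the provider) removes the directory together with all of its contents, so deleteFolder does not have to walk the tree itself. An illustrative standalone sketch, assuming commons-io is on the classpath:

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import org.apache.commons.io.FileUtils;

    // Illustrative sketch: FileUtils.deleteDirectory deletes recursively,
    // which is what deleteFolder above relies on.
    public class DeleteDirectoryDemo {
        public static void main(String[] args) throws IOException {
            File root = Files.createTempDirectory("nfs-demo").toFile();
            File nested = new File(root, "a/b");
            nested.mkdirs();
            Files.createFile(new File(nested, "file.txt").toPath());
            FileUtils.deleteDirectory(root);   // removes root, a, b and file.txt
            System.out.println(root.exists()); // false
        }
    }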
use of com.epam.pipeline.entity.datastorage.DataStorageException in project cloud-pipeline by epam.
the class NFSStorageProvider method getItems.
    @Override
    public DataStorageListing getItems(NFSDataStorage dataStorage, String path, Boolean showVersion,
                                       Integer pageSize, String marker) {
        File dataStorageRoot = mount(dataStorage);
        File dir = path != null ? new File(dataStorageRoot, path) : dataStorageRoot;
        long offset = StringUtils.isNumeric(marker) ? Long.parseLong(marker) : 1;
        try (Stream<Path> dirStream = Files.walk(dir.toPath(), 1)) {
            List<AbstractDataStorageItem> dataStorageItems = dirStream.sorted()
                    .skip(offset) // First element is a directory itself
                    .limit(pageSize)
                    .map(p -> {
                        File file = p.toFile();
                        AbstractDataStorageItem item;
                        if (file.isDirectory()) {
                            item = new DataStorageFolder();
                        } else {
                            // set size if it's a file
                            DataStorageFile dataStorageFile = new DataStorageFile();
                            dataStorageFile.setSize(file.length());
                            dataStorageFile.setChanged(
                                    S3Constants.getAwsDateFormat().format(new Date(file.lastModified())));
                            item = dataStorageFile;
                        }
                        item.setName(file.getName());
                        item.setPath(dataStorageRoot.toURI().relativize(file.toURI()).getPath());
                        return item;
                    })
                    .collect(Collectors.toList());
            DataStorageListing listing = new DataStorageListing();
            listing.setResults(dataStorageItems);
            Long nextOffset = offset + pageSize;
            try (Stream<Path> nextStream = Files.walk(dir.toPath(), 1)) {
                if (nextStream.skip(nextOffset).findFirst().isPresent()) {
                    listing.setNextPageMarker(nextOffset.toString());
                }
            }
            return listing;
        } catch (IOException e) {
            throw new DataStorageException(e);
        }
    }
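The paging in getItems relies on two walks of the same directory: the first skips offset entries (Files.walk at depth 1 yields the directory itself first, hence the default marker of 1) and takes pageSize entries; the second checks whether anything remains beyond offset + pageSize to decide if a next-page marker is needed. A standalone sketch of the same pattern (illustrative only; the directory and class name are arbitrary):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Standalone sketch of the paging pattern used by getItems above.
    public class ListingPageDemo {
        public static void main(String[] args) throws IOException {
            Path dir = Paths.get(".");   // illustrative: any readable directory
            long offset = 1;             // skip the directory entry itself
            int pageSize = 10;
            try (Stream<Path> walk = Files.walk(dir, 1)) {
                List<String> page = walk.sorted()
                        .skip(offset)
                        .limit(pageSize)
                        .map(p -> p.getFileName().toString())
                        .collect(Collectors.toList());
                System.out.println(page);
            }
            try (Stream<Path> walk = Files.walk(dir, 1)) {
                boolean hasNextPage = walk.skip(offset + pageSize).findFirst().isPresent();
                System.out.println("next page marker: " + (hasNextPage ? offset + pageSize : "none"));
            }
        }
    }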
use of com.epam.pipeline.entity.datastorage.DataStorageException in project cloud-pipeline by epam.
the class S3Helper method moveFolder.
    public DataStorageFolder moveFolder(String bucket, String rawOldPath, String rawNewPath)
            throws DataStorageException {
        if (StringUtils.isNullOrEmpty(rawOldPath) || StringUtils.isNullOrEmpty(rawNewPath)) {
            throw new DataStorageException(PATH_SHOULD_NOT_BE_EMPTY_MESSAGE);
        }
        final String oldPath = withTrailingDelimiter(rawOldPath);
        final String newPath = withTrailingDelimiter(rawNewPath);
        final String folderFullPath = newPath.substring(0, newPath.length() - 1);
        String[] parts = newPath.split(S3Constants.DELIMITER);
        final String folderName = parts[parts.length - 1];
        AmazonS3 client = getDefaultS3Client();
        if (!itemExists(client, bucket, oldPath, true)) {
            throw new DataStorageException(String.format("Folder '%s' does not exist", oldPath));
        }
        if (itemExists(client, bucket, newPath, true)) {
            throw new DataStorageException(String.format("Folder '%s' already exists", newPath));
        }
        ListObjectsRequest req = new ListObjectsRequest();
        req.setBucketName(bucket);
        ObjectListing listing = client.listObjects(req);
        boolean listingFinished = false;
        List<String> oldKeys = new ArrayList<>();
        while (!listingFinished) {
            for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
                if (s3ObjectSummary.getSize() > COPYING_FILE_SIZE_LIMIT) {
                    throw new DataStorageException(String.format(
                            "Moving folder '%s' was aborted because "
                                    + "some of its files '%s' size exceeds the limit of %s bytes",
                            oldPath, s3ObjectSummary.getKey(), COPYING_FILE_SIZE_LIMIT));
                }
                String relativePath = s3ObjectSummary.getKey();
                if (relativePath.startsWith(oldPath)) {
                    oldKeys.add(relativePath);
                }
            }
            if (listing.isTruncated()) {
                listing = client.listNextBatchOfObjects(listing);
            } else {
                listingFinished = true;
            }
        }
        final List<MoveObjectRequest> moveRequests = oldKeys.stream()
                .map(oldKey -> new MoveObjectRequest(oldKey, newPath + oldKey.substring(oldPath.length())))
                .collect(Collectors.toList());
        moveS3Objects(client, bucket, moveRequests);
        DataStorageFolder folder = new DataStorageFolder();
        folder.setName(folderName);
        folder.setPath(folderFullPath);
        return folder;
    }
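Most of moveFolder is plain key arithmetic: every key collected under the old prefix is mapped to the new prefix by replacing the leading oldPath with newPath, while the S3 listing loop merely gathers candidate keys page by page via isTruncated() and listNextBatchOfObjects. A small sketch of the key rewriting alone (illustrative, no AWS calls; paths and class name are made up):

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    // Illustrative only: the key rewriting performed for each MoveObjectRequest
    // in moveFolder replaces the old folder prefix with the new one.
    public class KeyRewriteDemo {
        public static void main(String[] args) {
            String oldPath = "data/old-folder/";
            String newPath = "data/new-folder/";
            List<String> oldKeys = Arrays.asList(
                    "data/old-folder/a.txt",
                    "data/old-folder/nested/b.txt");
            List<String> newKeys = oldKeys.stream()
                    .map(oldKey -> newPath + oldKey.substring(oldPath.length()))
                    .collect(Collectors.toList());
            System.out.println(newKeys); // [data/new-folder/a.txt, data/new-folder/nested/b.txt]
        }
    }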