Use of com.epam.pipeline.entity.datastorage.DataStorageFolder in project cloud-pipeline by epam.
In the class NFSStorageProviderTest, the method testMoveDeleteFile:
@Test
public void testMoveDeleteFile() {
    NFSDataStorage dataStorage = new NFSDataStorage(0L, "testStorage", TEST_PATH + 3 + ":/test");
    nfsProvider.createStorage(dataStorage);

    String testFileName = "testFile.txt";
    String testFolderName = "testFolder";
    String testFolder2Name = "testFolder2";
    nfsProvider.createFile(dataStorage, testFileName, "testContent".getBytes());
    nfsProvider.createFolder(dataStorage, testFolderName);
    nfsProvider.createFolder(dataStorage, testFolder2Name);

    File dataStorageRoot = new File(testMountPoint, TEST_PATH + 3 + "/test");

    String newFilePath = testFolderName + "/" + testFileName;
    DataStorageFile file = nfsProvider.moveFile(dataStorage, testFileName, newFilePath);
    Assert.assertEquals(newFilePath, file.getPath());
    File oldFileLocation = new File(dataStorageRoot, testFileName);
    File newFileLocation = new File(dataStorageRoot, newFilePath);
    Assert.assertTrue(newFileLocation.exists());
    Assert.assertFalse(oldFileLocation.exists());

    String newFolder2Path = testFolderName + "/" + testFolder2Name;
    DataStorageFolder folder = nfsProvider.moveFolder(dataStorage, testFolder2Name, newFolder2Path);
    Assert.assertEquals(newFolder2Path, folder.getPath());
    File oldFolderLocation = new File(dataStorageRoot, testFolder2Name);
    File newFolderLocation = new File(dataStorageRoot, newFolder2Path);
    Assert.assertTrue(newFolderLocation.exists());
    Assert.assertFalse(oldFolderLocation.exists());

    nfsProvider.deleteFile(dataStorage, newFilePath, null, true);
    Assert.assertFalse(newFileLocation.exists());
    nfsProvider.deleteFolder(dataStorage, newFolder2Path, true);
    Assert.assertFalse(newFolderLocation.exists());
}
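A side note on the path arithmetic this test relies on: the storage is created with an NFS share path of the form "host:/export", while dataStorageRoot is that same string with the ':' removed, resolved against testMountPoint. A minimal sketch of the mapping with hypothetical literal values (the real TEST_PATH and testMountPoint fixtures are defined elsewhere in the test class and are not shown here):

// Hypothetical values for illustration; TEST_PATH and testMountPoint come from the test fixture.
String sharePath = "server3:/test";                // what the NFSDataStorage is created with
String mountSubPath = sharePath.replace(":", "");  // "server3/test"
File dataStorageRoot = new File("/tmp/nfs-mount", mountSubPath);
// All assertions above resolve files against this root,
// e.g. new File(dataStorageRoot, "testFolder/testFile.txt").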
Use of com.epam.pipeline.entity.datastorage.DataStorageFolder in project cloud-pipeline by epam.
In the class NFSStorageProvider, the method createFolder:
@Override
public DataStorageFolder createFolder(NFSDataStorage dataStorage, String path) throws DataStorageException {
    File dataStorageDir = mount(dataStorage);
    File folder = new File(dataStorageDir, path);
    if (!folder.mkdirs()) {
        throw new DataStorageException(messageHelper.getMessage(
                MessageConstants.ERROR_DATASTORAGE_NFS_CREATE_FOLDER, dataStorage.getPath()));
    }
    try {
        setUmask(folder);
    } catch (IOException e) {
        throw new DataStorageException(messageHelper.getMessage(
                MessageConstants.ERROR_DATASTORAGE_CANNOT_CREATE_FILE, folder.getPath()), e);
    }
    return new DataStorageFolder(path, folder);
}
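The setUmask helper called here is not part of this excerpt. A minimal sketch of what such a helper might do, assuming it relaxes the POSIX permissions of the newly created directory (the exact permission mask is an assumption, not taken from the project):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

// Hypothetical sketch; the real setUmask in NFSStorageProvider may differ.
private void setUmask(File file) throws IOException {
    // Assumed mask: full access for owner and group, read/execute for others (rwxrwxr-x).
    Set<PosixFilePermission> permissions = PosixFilePermissions.fromString("rwxrwxr-x");
    Files.setPosixFilePermissions(file.toPath(), permissions);
}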
Use of com.epam.pipeline.entity.datastorage.DataStorageFolder in project cloud-pipeline by epam.
In the class NFSStorageProvider, the method getItems:
@Override
public DataStorageListing getItems(NFSDataStorage dataStorage, String path, Boolean showVersion,
                                   Integer pageSize, String marker) {
    File dataStorageRoot = mount(dataStorage);
    File dir = path != null ? new File(dataStorageRoot, path) : dataStorageRoot;
    long offset = StringUtils.isNumeric(marker) ? Long.parseLong(marker) : 1;
    try (Stream<Path> dirStream = Files.walk(dir.toPath(), 1)) {
        List<AbstractDataStorageItem> dataStorageItems = dirStream.sorted()
                // First element is the directory itself
                .skip(offset)
                .limit(pageSize)
                .map(p -> {
                    File file = p.toFile();
                    AbstractDataStorageItem item;
                    if (file.isDirectory()) {
                        item = new DataStorageFolder();
                    } else {
                        // set size if it's a file
                        DataStorageFile dataStorageFile = new DataStorageFile();
                        dataStorageFile.setSize(file.length());
                        dataStorageFile.setChanged(S3Constants.getAwsDateFormat().format(new Date(file.lastModified())));
                        item = dataStorageFile;
                    }
                    item.setName(file.getName());
                    item.setPath(dataStorageRoot.toURI().relativize(file.toURI()).getPath());
                    return item;
                })
                .collect(Collectors.toList());
        DataStorageListing listing = new DataStorageListing();
        listing.setResults(dataStorageItems);
        Long nextOffset = offset + pageSize;
        try (Stream<Path> nextStream = Files.walk(dir.toPath(), 1)) {
            if (nextStream.skip(nextOffset).findFirst().isPresent()) {
                listing.setNextPageMarker(nextOffset.toString());
            }
        }
        return listing;
    } catch (IOException e) {
        throw new DataStorageException(e);
    }
}
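getItems returns at most pageSize entries per call and reports the offset of the next page through nextPageMarker, so callers typically loop until the marker is gone. A minimal usage sketch, assuming an already configured nfsProvider and dataStorage (as in the test above) and the usual getter counterparts of the setters shown here:

// Page through the storage root, 100 items at a time.
List<AbstractDataStorageItem> allItems = new ArrayList<>();
String marker = null;
do {
    DataStorageListing page = nfsProvider.getItems(dataStorage, null, false, 100, marker);
    allItems.addAll(page.getResults());
    marker = page.getNextPageMarker();
} while (marker != null);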
Use of com.epam.pipeline.entity.datastorage.DataStorageFolder in project cloud-pipeline by epam.
In the class S3Helper, the method moveFolder:
public DataStorageFolder moveFolder(String bucket, String rawOldPath, String rawNewPath)
        throws DataStorageException {
    if (StringUtils.isNullOrEmpty(rawOldPath) || StringUtils.isNullOrEmpty(rawNewPath)) {
        throw new DataStorageException(PATH_SHOULD_NOT_BE_EMPTY_MESSAGE);
    }
    final String oldPath = withTrailingDelimiter(rawOldPath);
    final String newPath = withTrailingDelimiter(rawNewPath);
    final String folderFullPath = newPath.substring(0, newPath.length() - 1);
    String[] parts = newPath.split(S3Constants.DELIMITER);
    final String folderName = parts[parts.length - 1];
    AmazonS3 client = getDefaultS3Client();
    if (!itemExists(client, bucket, oldPath, true)) {
        throw new DataStorageException(String.format("Folder '%s' does not exist", oldPath));
    }
    if (itemExists(client, bucket, newPath, true)) {
        throw new DataStorageException(String.format("Folder '%s' already exists", newPath));
    }
    ListObjectsRequest req = new ListObjectsRequest();
    req.setBucketName(bucket);
    ObjectListing listing = client.listObjects(req);
    boolean listingFinished = false;
    List<String> oldKeys = new ArrayList<>();
    while (!listingFinished) {
        for (S3ObjectSummary s3ObjectSummary : listing.getObjectSummaries()) {
            if (s3ObjectSummary.getSize() > COPYING_FILE_SIZE_LIMIT) {
                throw new DataStorageException(String.format(
                        "Moving folder '%s' was aborted because some of its files '%s' size exceeds the limit of %s bytes",
                        oldPath, s3ObjectSummary.getKey(), COPYING_FILE_SIZE_LIMIT));
            }
            String relativePath = s3ObjectSummary.getKey();
            if (relativePath.startsWith(oldPath)) {
                oldKeys.add(relativePath);
            }
        }
        if (listing.isTruncated()) {
            listing = client.listNextBatchOfObjects(listing);
        } else {
            listingFinished = true;
        }
    }
    final List<MoveObjectRequest> moveRequests = oldKeys.stream()
            .map(oldKey -> new MoveObjectRequest(oldKey, newPath + oldKey.substring(oldPath.length())))
            .collect(Collectors.toList());
    moveS3Objects(client, bucket, moveRequests);
    DataStorageFolder folder = new DataStorageFolder();
    folder.setName(folderName);
    folder.setPath(folderFullPath);
    return folder;
}
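Neither withTrailingDelimiter nor moveS3Objects is shown in this excerpt. S3 has no rename primitive, so a "move" is conventionally a server-side copy to the new key followed by a delete of the old key, which is also why the method above refuses to move objects larger than COPYING_FILE_SIZE_LIMIT in a single copy request. A minimal sketch of both helpers under that assumption (an illustration of the approach, not the project's actual code):

// Illustrative sketches only; the real helpers in S3Helper may differ.

// Ensure a folder prefix ends with the S3 delimiter so that key prefix matching works.
private static String withTrailingDelimiter(String path) {
    return path.endsWith(S3Constants.DELIMITER) ? path : path + S3Constants.DELIMITER;
}

// Move a single object: server-side copy within the bucket, then delete the source key.
private static void moveObject(AmazonS3 client, String bucket, String oldKey, String newKey) {
    client.copyObject(bucket, oldKey, bucket, newKey);
    client.deleteObject(bucket, oldKey);
}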
Use of com.epam.pipeline.entity.datastorage.DataStorageFolder in project cloud-pipeline by epam.
In the class S3Helper, the method createFolder:
public DataStorageFolder createFolder(String bucket, String path) throws DataStorageException {
    if (StringUtils.isNullOrEmpty(path) || StringUtils.isNullOrEmpty(path.trim())) {
        throw new DataStorageException(PATH_SHOULD_NOT_BE_EMPTY_MESSAGE);
    }
    String folderPath = path.trim();
    if (!folderPath.endsWith(S3Constants.DELIMITER)) {
        folderPath += S3Constants.DELIMITER;
    }
    if (folderPath.startsWith(S3Constants.DELIMITER)) {
        folderPath = folderPath.substring(1);
    }
    final String folderFullPath = folderPath.substring(0, folderPath.length() - 1);
    AmazonS3 client = getDefaultS3Client();
    if (itemExists(client, bucket, folderPath, true)) {
        throw new DataStorageException("Folder already exists");
    }
    folderPath += S3Constants.FOLDER_TOKEN_FILE;
    String[] parts = folderPath.split(S3Constants.DELIMITER);
    final String folderName = parts[parts.length - 2];
    try {
        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setLastModified(new Date());
        byte[] contents = "".getBytes();
        ByteArrayInputStream byteInputStream = new ByteArrayInputStream(contents);
        client.putObject(new PutObjectRequest(bucket, folderPath, byteInputStream, objectMetadata));
        DataStorageFolder folder = new DataStorageFolder();
        folder.setName(folderName);
        folder.setPath(folderFullPath);
        return folder;
    } catch (SdkClientException e) {
        throw new DataStorageException(e.getMessage(), e.getCause());
    }
}
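Since S3 has no real directories, createFolder materializes the folder as a zero-byte token object (S3Constants.FOLDER_TOKEN_FILE) under the normalized prefix. A short usage sketch with hypothetical names, assuming an s3Helper instance obtained elsewhere and the usual getter counterparts of the setters above:

// Hypothetical bucket name; leading and trailing delimiters are normalized away.
DataStorageFolder folder = s3Helper.createFolder("my-bucket", "/data/results/");
// Key written to the bucket: "data/results/" + S3Constants.FOLDER_TOKEN_FILE
// (a zero-byte object that makes the otherwise empty "folder" visible in listings).
System.out.println(folder.getName());   // "results"
System.out.println(folder.getPath());   // "data/results"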