Use of com.epam.pipeline.entity.datastorage.aws.S3bucketDataStorage in the cloud-pipeline project by epam.
From class DataStorageManager, method generateTemporaryCredentials.
/**
 * Issues temporary cloud credentials covering the given data storage actions.
 *
 * The first action determines the reference storage; every action must target
 * an S3 bucket, and all buckets must belong to the same AWS region as the
 * reference bucket. Each action is stamped with its bucket's path before the
 * credentials request is built.
 *
 * @param actions the storage actions to authorize; must be non-empty
 * @return temporary credentials scoped to the requested actions
 * @throws IllegalArgumentException if no actions are provided, a storage is
 *         not an S3 bucket, or the buckets span multiple regions
 */
public AbstractTemporaryCredentials generateTemporaryCredentials(List<DataStorageAction> actions) {
    AbstractDataStorage dataStorage = ListUtils.emptyIfNull(actions)
            .stream()
            .findFirst()
            .map(action -> load(action.getId()))
            .orElseThrow(() -> new IllegalArgumentException("Actions are not provided"));
    Assert.isTrue(dataStorage instanceof S3bucketDataStorage,
            "Temporary credentials are supported only for S3 Buckets.");
    S3bucketDataStorage bucket = (S3bucketDataStorage) dataStorage;
    AwsRegion awsRegion = awsRegionManager.getAwsRegion(bucket);
    for (DataStorageAction action : actions) {
        // Reuse the already-loaded storage for the first action instead of a second lookup.
        AbstractDataStorage loaded = action.getId().equals(dataStorage.getId())
                ? dataStorage
                : load(action.getId());
        Assert.isTrue(loaded instanceof S3bucketDataStorage,
                "Temporary credentials are supported only for S3 Buckets.");
        action.setBucketName(loaded.getPath());
        Long regionId = ((S3bucketDataStorage) loaded).getRegionId();
        Assert.isTrue(Objects.equals(regionId, bucket.getRegionId()),
                "Actions shall be requested for buckets from the same region");
    }
    return dataStorageFactory.temporaryCredentials(bucket.getType())
            .withRole(preferenceManager.getPreference(SystemPreferences.DATA_STORAGE_TEMP_CREDENTIALS_ROLE))
            .withDurationSeconds(preferenceManager.getPreference(SystemPreferences.DATA_STORAGE_TEMP_CREDENTIALS_DURATION))
            .withKmsArn(awsRegion.getKmsKeyArn())
            .withRegionId(awsRegion.getAwsRegionName())
            .generate(actions);
}
Use of com.epam.pipeline.entity.datastorage.aws.S3bucketDataStorage in the cloud-pipeline project by epam.
From class FolderManagerGetProjectTest, method getProjectShouldWorkWithDataStorageAsInputEntity.
@Test
public void getProjectShouldWorkWithDataStorageAsInputEntity() {
    // A storage parented under folder3 should resolve to folder2 as its project.
    S3bucketDataStorage storage = new S3bucketDataStorage(1L, "dataStorage", "path_to_bucket");
    storage.setParent(folder3);
    Mockito.when(entityManager.load(Matchers.any(AclClass.class), Matchers.any(Long.class)))
            .thenReturn(storage);
    Folder project = folderManager.getProject(storage.getId(), AclClass.DATA_STORAGE);
    assertFolders(folder2, project);
}
Use of com.epam.pipeline.entity.datastorage.aws.S3bucketDataStorage in the cloud-pipeline project by epam.
From class PipelineConfigurationManagerTest, method setUp.
/**
 * Builds the fixture shared by the tests: a docker registry with one tool
 * group and one tool, plus four data storages — an NFS storage and an S3
 * bucket for each of two owners. ACL entries are created for every storage,
 * and READ is granted to TEST_OWNER1 on the last NFS storage.
 */
@Before
public void setUp() throws Exception {
    registry = new DockerRegistry();
    registry.setPath(TEST_REPO);
    registry.setOwner(TEST_USER);
    dockerRegistryDao.createDockerRegistry(registry);

    library = new ToolGroup();
    library.setName(TOOL_GROUP_NAME);
    library.setRegistryId(registry.getId());
    library.setOwner(TEST_USER);
    toolGroupDao.createToolGroup(library);

    tool = new Tool();
    tool.setImage(TEST_IMAGE);
    tool.setRam(TEST_RAM);
    tool.setCpu(TEST_CPU);
    tool.setOwner(TEST_USER);
    tool.setRegistryId(registry.getId());
    tool.setToolGroupId(library.getId());
    toolDao.createTool(tool);

    // Data storages of user 1
    NFSDataStorage dataStorage = new NFSDataStorage(dataStorageDao.createDataStorageId(), "testNFS", "test/path1");
    dataStorage.setMountOptions("testMountOptions1");
    dataStorage.setMountPoint("/some/other/path");
    dataStorage.setOwner(TEST_OWNER1);
    dataStorageDao.createDataStorage(dataStorage);
    dataStorages.add(dataStorage);

    S3bucketDataStorage bucketDataStorage =
            new S3bucketDataStorage(dataStorageDao.createDataStorageId(), "testBucket", "test/path2");
    bucketDataStorage.setOwner(TEST_OWNER1);
    dataStorageDao.createDataStorage(bucketDataStorage);
    dataStorages.add(bucketDataStorage);

    // Data storages of user 2
    dataStorage = new NFSDataStorage(dataStorageDao.createDataStorageId(), "testNFS2", "test/path3");
    dataStorage.setMountOptions("testMountOptions2");
    dataStorage.setOwner(TEST_OWNER2);
    dataStorageDao.createDataStorage(dataStorage);
    dataStorages.add(dataStorage);

    bucketDataStorage = new S3bucketDataStorage(dataStorageDao.createDataStorageId(), "testBucket2", "test/path4");
    bucketDataStorage.setOwner(TEST_OWNER2);
    dataStorageDao.createDataStorage(bucketDataStorage);
    dataStorages.add(bucketDataStorage);

    dataStorages.forEach(aclTestDao::createAclForObject);
    // NOTE(review): `dataStorage` here is the second NFS storage (owned by
    // TEST_OWNER2), so READ is granted to TEST_OWNER1 on another user's
    // storage — presumably intentional for cross-owner permission tests; confirm.
    aclTestDao.grantPermissions(dataStorage, TEST_OWNER1,
            Collections.singletonList((AclPermission) AclPermission.READ));
}
Use of com.epam.pipeline.entity.datastorage.aws.S3bucketDataStorage in the cloud-pipeline project by epam.
From class AbstractDataStorageMapper, method fillS3Fields.
/**
 * Post-mapping hook that copies the S3-specific fields (allowed CIDRs and
 * region id) from the storage entity onto the VO. A no-op for storages that
 * are not S3 buckets.
 */
@AfterMapping
public void fillS3Fields(AbstractDataStorage dataStorage, @MappingTarget DataStorageVO dataStorageVO) {
    if (!(dataStorage instanceof S3bucketDataStorage)) {
        return;
    }
    S3bucketDataStorage bucket = (S3bucketDataStorage) dataStorage;
    dataStorageVO.setAllowedCidrs(bucket.getAllowedCidrs());
    dataStorageVO.setRegionId(bucket.getRegionId());
}
Use of com.epam.pipeline.entity.datastorage.aws.S3bucketDataStorage in the cloud-pipeline project by epam.
From class DataStorageDaoTest, method setUp.
@Before
public void setUp() {
    // Fixture: a parent folder, a default AWS region, a fully-populated S3
    // bucket (with versioning/backup policy), and an NFS storage. Nothing is
    // persisted here except the folder and region — the storages themselves
    // are created by the individual tests.
    testFolder = buildFolder(null);

    // Region must be persisted first so its generated id can be set on the bucket.
    awsRegion = new AwsRegion();
    awsRegion.setName("Default");
    awsRegion.setDefault(true);
    awsRegion.setAwsRegionName("us-east-1");
    awsRegionDao.create(awsRegion);

    // S3 bucket exercising every optional field the DAO must round-trip.
    s3Bucket = new S3bucketDataStorage(null, TEST_STORAGE_NAME, TEST_STORAGE_PATH);
    s3Bucket.setDescription("testDescription");
    s3Bucket.setParentFolderId(testFolder.getId());
    s3Bucket.setRegionId(awsRegion.getId());
    s3Bucket.setOwner(TEST_OWNER);
    s3Bucket.setMountPoint("testMountPoint");
    s3Bucket.setMountOptions("testMountOptions");
    s3Bucket.setShared(true);
    s3Bucket.setAllowedCidrs(Arrays.asList("test1", "test2"));

    // Storage lifecycle policy attached to the bucket: versioning enabled
    // with backup/short-term/long-term durations from the test constants.
    policy = new StoragePolicy();
    policy.setBackupDuration(BACKUP_DURATION);
    policy.setLongTermStorageDuration(LTS_DURATION);
    policy.setShortTermStorageDuration(STS_DURATION);
    policy.setVersioningEnabled(true);
    s3Bucket.setStoragePolicy(policy);

    // Minimal NFS storage counterpart with custom mount options/point.
    nfsStorage = new NFSDataStorage(null, "NFS_STORAGE", "nfs_path");
    nfsStorage.setOwner(TEST_OWNER);
    nfsStorage.setDescription("NFS");
    nfsStorage.setParentFolderId(testFolder.getId());
    nfsStorage.setMountOptions("-s");
    nfsStorage.setMountPoint("nfs");
}
Aggregations