Use of com.ibm.cohort.tooling.s3.S3Configuration in project teamcity-s3-artifact-storage-plugin by JetBrains.
The class SettingsProcessor, method processSettings.
@NotNull
public S3Configuration processSettings(@NotNull final Map<String, String> sharedConfigParameters, @NotNull final Map<String, String> artifactStorageSettings) {
  final String certDirectory = TrustedCertificatesDirectory.getAllCertificatesDirectoryFromHome(myHome.getPath());
  final Map<String, String> storageSettings = new HashMap<>(SSLParamUtil.putSslDirectory(artifactStorageSettings, certDirectory));
  final S3Configuration s3Configuration = new S3Configuration(configuration(sharedConfigParameters, storageSettings), storageSettings);
  s3Configuration.validate();
  return s3Configuration;
}
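For context, a minimal sketch of how processSettings might be called from agent-side code; the constructor argument mirrors the getFileUploader snippet further down this page, while the helper method name, the agent home directory and the single storage key are illustrative assumptions only.

// Illustrative sketch; buildConfiguration, the agent home directory and the map contents are assumptions, not plugin code.
private S3Configuration buildConfiguration(File agentHomeDirectory) {
  Map<String, String> sharedConfigParameters = Collections.emptyMap();
  Map<String, String> artifactStorageSettings = new HashMap<>();
  artifactStorageSettings.put("storage.s3.bucket.name", "BUCKET_NAME"); // key borrowed from the test below
  SettingsProcessor settingsProcessor = new SettingsProcessor(agentHomeDirectory);
  // processSettings() already validates the resulting configuration, so no extra validate() call is needed here.
  return settingsProcessor.processSettings(sharedConfigParameters, artifactStorageSettings);
}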
Use of com.ibm.cohort.tooling.s3.S3Configuration in project teamcity-s3-artifact-storage-plugin by JetBrains.
The class S3ArtifactsPublisherTest, method testMultipleArtifactPublishesIncludedInArtifactList.
@Test
public void testMultipleArtifactPublishesIncludedInArtifactList() throws IOException {
  // Two separate artifacts, each published through its own publishFiles(...) call.
  HashMap<File, String> artifacts1 = new HashMap<>();
  File file1 = new File("artifact1");
  artifacts1.put(file1, "");
  List<FileUploadInfo> uploadInfos1 = Collections.singletonList(new FileUploadInfo(file1.getName(), file1.getAbsolutePath(), file1.length(), null));
  HashMap<File, String> artifacts2 = new HashMap<>();
  File file2 = new File("artifact2");
  artifacts2.put(file2, "");
  List<FileUploadInfo> uploadInfos2 = Collections.singletonList(new FileUploadInfo(file2.getName(), file2.getAbsolutePath(), file2.length(), null));
  AgentArtifactHelper helper = Mockito.mock(AgentArtifactHelper.class);
  when(helper.isEnabled(any(), any())).thenReturn(true);
  // Minimal S3 storage settings for the mocked running build.
  AgentRunningBuild build = Mockito.mock(AgentRunningBuild.class);
  HashMap<String, String> storageSettings = new HashMap<>();
  storageSettings.put("aws.region.name", "test");
  storageSettings.put("secure:aws.secret.access.key", "test");
  storageSettings.put("aws.access.key.id", "test");
  storageSettings.put("aws.credentials.type", "aws.access.keys");
  storageSettings.put("storage.s3.bucket.name", "BUCKET_NAME");
  storageSettings.put("aws.environment", "custom");
  storageSettings.put("aws.service.endpoint", "http://localhost");
  when(build.getSharedConfigParameters()).thenReturn(Collections.emptyMap());
  when(build.getArtifactStorageSettings()).thenReturn(storageSettings);
  when(build.getBuildLogger()).thenReturn(Mockito.mock(BuildProgressLogger.class, RETURNS_DEEP_STUBS));
  CurrentBuildTracker tracker = Mockito.mock(CurrentBuildTracker.class);
  when(tracker.getCurrentBuild()).thenReturn(build);
  BuildAgentConfiguration config = Mockito.mock(BuildAgentConfiguration.class);
  when(config.getAgentHomeDirectory()).thenReturn(new File(""));
  PresignedUrlsProviderClientFactory clientFactory = Mockito.mock(PresignedUrlsProviderClientFactory.class);
  ExtensionHolder holder = Mockito.mock(ExtensionHolder.class);
  EventDispatcher<AgentLifeCycleListener> dispatcher = EventDispatcher.create(AgentLifeCycleListener.class);
  S3FileUploaderFactory uploaderFactory = Mockito.mock(S3FileUploaderFactory.class);
  final S3Configuration s3Configuration = Mockito.mock(S3Configuration.class);
  final S3UploadLogger s3UploadLogger = Mockito.mock(S3UploadLogger.class);
  // The first uploader stub reports only the first artifact to the consumer.
  S3FileUploader uploader = new S3FileUploader(s3Configuration, s3UploadLogger) {
    @Override
    public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload, @NotNull Supplier<String> interrupter, Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
      uploadInfos1.forEach(i -> uploadInfoConsumer.accept(i));
      return null;
    }
  };
  when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader);
  S3ArtifactsPublisher publisher = new S3ArtifactsPublisher(helper, dispatcher, tracker, config, clientFactory, uploaderFactory, holder);
  publisher.publishFiles(artifacts1);
  ArgumentCaptor<List<ArtifactDataInstance>> argumentCaptor = ArgumentCaptor.forClass(List.class);
  Mockito.verify(helper, times(1)).publishArtifactList(argumentCaptor.capture(), any());
  List<ArtifactDataInstance> value1 = new ArrayList<>(argumentCaptor.getValue());
  assertEquals("First publishing run should have 1 artifact in the list", 1, value1.size());
  // The second uploader stub reports only the second artifact; the published list should accumulate both.
  S3FileUploader uploader2 = new S3FileUploader(s3Configuration, s3UploadLogger) {
    @Override
    public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload, @NotNull Supplier<String> interrupter, Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
      uploadInfos2.forEach(i -> uploadInfoConsumer.accept(i));
      return null;
    }
  };
  when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader2);
  publisher.publishFiles(artifacts2);
  Mockito.verify(helper, times(2)).publishArtifactList(argumentCaptor.capture(), any());
  List<ArtifactDataInstance> value2 = argumentCaptor.getValue();
  assertEquals("Second publishing run should have 2 artifacts in the list", 2, value2.size());
}
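As a side note, the unchecked ArgumentCaptor.forClass(List.class) call in the test above can be replaced with Mockito's annotation-driven captor; a small sketch, assuming JUnit 4 and Mockito 3.4+ (the field name and setup method are not part of the original test).

// Sketch of the annotation-based alternative to ArgumentCaptor.forClass(List.class).
@Captor
private ArgumentCaptor<List<ArtifactDataInstance>> artifactListCaptor;

@Before
public void setUpCaptors() {
  // openMocks(this) initializes @Captor (and @Mock) fields declared on the test class.
  MockitoAnnotations.openMocks(this);
}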
Use of com.ibm.cohort.tooling.s3.S3Configuration in project teamcity-s3-artifact-storage-plugin by JetBrains.
The class S3ArtifactsPublisher, method getFileUploader.
@NotNull
private S3FileUploader getFileUploader(@NotNull final AgentRunningBuild build, FlowLogger flowLogger) {
  if (myFileUploader == null) {
    Collection<ArtifactTransportAdditionalHeadersProvider> headersProviders = myExtensionHolder.getExtensions(ArtifactTransportAdditionalHeadersProvider.class);
    final SettingsProcessor settingsProcessor = new SettingsProcessor(myBuildAgentConfiguration.getAgentHomeDirectory());
    final S3Configuration s3Configuration = settingsProcessor.processSettings(build.getSharedConfigParameters(), build.getArtifactStorageSettings());
    s3Configuration.setPathPrefix(getPathPrefix(build));
    myFileUploader = myUploaderFactory.create(s3Configuration, CompositeS3UploadLogger.compose(new BuildLoggerS3Logger(flowLogger), new S3Log4jUploadLogger()), () -> myPresignedUrlsProviderClientFactory.createClient(teamcityConnectionConfiguration(build), headersProviders));
  }
  return myFileUploader;
}
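The uploader created here is driven elsewhere in the publisher; a hedged sketch of what such a call site could look like, based solely on the upload signature shown in the test above (the helper name, interrupter and consumer are placeholders).

// Hypothetical call site; the upload(...) signature is taken from the S3FileUploader overrides in the test above.
private void uploadAll(S3FileUploader uploader, Map<File, String> filesToUpload, List<FileUploadInfo> collected) throws InvalidSettingsException {
  uploader.upload(
    filesToUpload,
    () -> null,        // interrupter: assumed to return a non-null reason when the upload should be stopped
    collected::add);   // each successfully uploaded file is reported through the consumer
}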
Use of com.ibm.cohort.tooling.s3.S3Configuration in project syndesis by syndesisio.
The class AWSS3RawOptionsTest, method createCamelContext.
@Override
protected CamelContext createCamelContext() throws Exception {
  CamelContext camelContext = super.createCamelContext();
  camelContext.setAutoStartup(false);
  camelContext.addComponent("aws-s3", new S3Component() {
    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
      final S3Configuration configuration = new S3Configuration();
      setProperties(configuration, parameters);
      return new S3Endpoint(uri, this, configuration) {
        @Override
        public void doStart() throws Exception {
          // don't let the endpoint start, as it would try to process the keys
        }
      };
    }
  });
  return camelContext;
}
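For reference, the RAW() syntax that this test class targets would appear in a route roughly as follows; the route, bucket name and credentials are placeholders and could be added in createCamelContext before the camelContext is returned.

// Illustrative only: RAW(...) keeps the enclosed value from being URI-decoded, which matters for secrets with special characters.
camelContext.addRoutes(new RouteBuilder() {
  @Override
  public void configure() {
    from("direct:start")
      .to("aws-s3://my-bucket?accessKey=RAW(AKIAIOSFODNN7EXAMPLE)&secretKey=RAW(se+cr+et)");
  }
});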
Use of com.ibm.cohort.tooling.s3.S3Configuration in project spring-cloud-aws by awspring.
The class S3AutoConfiguration, method s3ServiceConfiguration.
private S3Configuration s3ServiceConfiguration() {
  S3Configuration.Builder config = S3Configuration.builder();
  PropertyMapper propertyMapper = PropertyMapper.get();
  propertyMapper.from(properties::getAccelerateModeEnabled).whenNonNull().to(config::accelerateModeEnabled);
  propertyMapper.from(properties::getChecksumValidationEnabled).whenNonNull().to(config::checksumValidationEnabled);
  propertyMapper.from(properties::getChunkedEncodingEnabled).whenNonNull().to(config::chunkedEncodingEnabled);
  propertyMapper.from(properties::getPathStyleAccessEnabled).whenNonNull().to(config::pathStyleAccessEnabled);
  propertyMapper.from(properties::getUseArnRegionEnabled).whenNonNull().to(config::useArnRegionEnabled);
  return config.build();
}
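Outside of the auto-configuration, the same SDK-level S3Configuration can be built by hand and attached to a client; a minimal sketch, with the method name, region and flag value chosen purely for illustration.

// Minimal sketch; serviceConfiguration(...) is the AWS SDK v2 hook that accepts an S3Configuration.
private S3Client buildS3Client() {
  S3Configuration serviceConfiguration = S3Configuration.builder()
    .pathStyleAccessEnabled(true)     // illustrative: often used with S3-compatible endpoints
    .build();
  return S3Client.builder()
    .region(Region.EU_WEST_1)         // illustrative region
    .serviceConfiguration(serviceConfiguration)
    .build();
}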