Use of jetbrains.buildServer.artifacts.ArtifactDataInstance in the project teamcity-s3-artifact-storage-plugin by JetBrains:
class S3ArtifactsPublisherTest, method testMultipleArtifactPublishesIncludedInArtifactList.
/**
 * Verifies that consecutive {@code publishFiles} calls accumulate uploaded artifacts:
 * the artifact list passed to {@code publishArtifactList} must contain one entry after
 * the first publish and two entries after the second.
 */
@Test
public void testMultipleArtifactPublishesIncludedInArtifactList() throws IOException {
HashMap<File, String> artifacts1 = new HashMap<>();
File file1 = new File("artifact1");
artifacts1.put(file1, "");
List<FileUploadInfo> uploadInfos1 = Collections.singletonList(new FileUploadInfo(file1.getName(), file1.getAbsolutePath(), file1.length(), null));
HashMap<File, String> artifacts2 = new HashMap<>();
File file2 = new File("artifact2");
artifacts2.put(file2, "");
List<FileUploadInfo> uploadInfos2 = Collections.singletonList(new FileUploadInfo(file2.getName(), file2.getAbsolutePath(), file2.length(), null));
AgentArtifactHelper helper = Mockito.mock(AgentArtifactHelper.class);
when(helper.isEnabled(any(), any())).thenReturn(true);
AgentRunningBuild build = Mockito.mock(AgentRunningBuild.class);
// Minimal S3 storage configuration pointing at a fake custom endpoint.
HashMap<String, String> storageSettings = new HashMap<>();
storageSettings.put("aws.region.name", "test");
storageSettings.put("secure:aws.secret.access.key", "test");
storageSettings.put("aws.access.key.id", "test");
storageSettings.put("aws.credentials.type", "aws.access.keys");
storageSettings.put("storage.s3.bucket.name", "BUCKET_NAME");
storageSettings.put("aws.environment", "custom");
storageSettings.put("aws.service.endpoint", "http://localhost");
when(build.getSharedConfigParameters()).thenReturn(Collections.emptyMap());
when(build.getArtifactStorageSettings()).thenReturn(storageSettings);
when(build.getBuildLogger()).thenReturn(Mockito.mock(BuildProgressLogger.class, RETURNS_DEEP_STUBS));
CurrentBuildTracker tracker = Mockito.mock(CurrentBuildTracker.class);
when(tracker.getCurrentBuild()).thenReturn(build);
BuildAgentConfiguration config = Mockito.mock(BuildAgentConfiguration.class);
when(config.getAgentHomeDirectory()).thenReturn(new File(""));
PresignedUrlsProviderClientFactory clientFactory = Mockito.mock(PresignedUrlsProviderClientFactory.class);
ExtensionHolder holder = Mockito.mock(ExtensionHolder.class);
EventDispatcher<AgentLifeCycleListener> dispatcher = EventDispatcher.create(AgentLifeCycleListener.class);
S3FileUploaderFactory uploaderFactory = Mockito.mock(S3FileUploaderFactory.class);
final S3Configuration s3Configuration = Mockito.mock(S3Configuration.class);
final S3UploadLogger s3UploadLogger = Mockito.mock(S3UploadLogger.class);
// Stub uploader that reports the first file as uploaded without touching S3.
S3FileUploader uploader = new S3FileUploader(s3Configuration, s3UploadLogger) {
@Override
public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload, @NotNull Supplier<String> interrupter, Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
uploadInfos1.forEach(uploadInfoConsumer::accept);
return null;
}
};
when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader);
S3ArtifactsPublisher publisher = new S3ArtifactsPublisher(helper, dispatcher, tracker, config, clientFactory, uploaderFactory, holder);
publisher.publishFiles(artifacts1);
ArgumentCaptor<List<ArtifactDataInstance>> argumentCaptor = ArgumentCaptor.forClass(List.class);
Mockito.verify(helper, times(1)).publishArtifactList(argumentCaptor.capture(), any());
List<ArtifactDataInstance> value1 = new ArrayList<>(argumentCaptor.getValue());
// JUnit message-first overload is (message, expected, actual) — expected value goes first.
assertEquals("First publishing run should have 1 artifact in the list", 1, value1.size());
// Second stub uploader reports the second file; the publisher must append, not replace.
S3FileUploader uploader2 = new S3FileUploader(s3Configuration, s3UploadLogger) {
@Override
public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload, @NotNull Supplier<String> interrupter, Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
uploadInfos2.forEach(uploadInfoConsumer::accept);
return null;
}
};
when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader2);
publisher.publishFiles(artifacts2);
Mockito.verify(helper, times(2)).publishArtifactList(argumentCaptor.capture(), any());
List<ArtifactDataInstance> value2 = argumentCaptor.getValue();
assertEquals("Second publishing run should have 2 artifacts in the list", 2, value2.size());
}
Use of jetbrains.buildServer.artifacts.ArtifactDataInstance in the project teamcity-s3-artifact-storage-plugin by JetBrains:
class S3CleanupExtensionIntegrationTest, method deletesArtifactsFromS3WithRetry.
/**
 * Verifies that cleanup retries a recoverable S3 deletion failure: the bucket/object are
 * created only from within the error listener, so the first delete attempt fails and a
 * later retry must succeed and remove the object.
 */
@Test
public void deletesArtifactsFromS3WithRetry() {
AWSCredentialsProvider credentialsProvider = myLocalStack.getDefaultCredentialsProvider();
AwsClientBuilder.EndpointConfiguration endpointConfiguration = myLocalStack.getEndpointConfiguration(S3);
Map<String, String> storageSettings = getStorageSettings(credentialsProvider, endpointConfiguration);
// Short delay with many retries: the listener below has plenty of attempts to succeed.
storageSettings.put("teamcity.internal.storage.s3.upload.retryDelayMs", "500");
storageSettings.put("teamcity.internal.storage.s3.upload.numberOfRetries", "50");
String artifactPath = "bar";
String expectedContents = "baz";
ArtifactDataInstance artifact = ArtifactDataInstance.create(artifactPath, expectedContents.length());
S3CleanupExtension cleanupExtension = getCleanupExtension(storageSettings, artifact);
AmazonS3 s3 = getS3Client(credentialsProvider, endpointConfiguration);
final AtomicBoolean listenerCalled = new AtomicBoolean(false);
cleanupExtension.registerListener(new AbstractCleanupListener() {
@Override
public void onError(Exception exception, boolean isRecoverable) {
// First delete fails (bucket missing); create the data so the retry can succeed.
if (exception instanceof AmazonS3Exception && isRecoverable) {
listenerCalled.set(true);
s3.createBucket(BUCKET_NAME);
s3.putObject(BUCKET_NAME, artifactPath, expectedContents);
}
}
});
Mock contextMock = getContextMock();
contextMock.stubs().method("onBuildCleanupError").will(throwException(new RuntimeException("Build cleanup error")));
BuildCleanupContext context = (BuildCleanupContext) contextMock.proxy();
try {
cleanupExtension.prepareBuildsData(context);
cleanupExtension.cleanupBuildsData(context);
assertFalse(s3.doesObjectExist(BUCKET_NAME, artifactPath));
assertTrue(listenerCalled.get());
} finally {
// Tear down even on assertion failure so the bucket does not leak into other tests;
// remove any leftover object first, since deleteBucket fails on a non-empty bucket.
if (s3.doesBucketExistV2(BUCKET_NAME)) {
if (s3.doesObjectExist(BUCKET_NAME, artifactPath)) {
s3.deleteObject(BUCKET_NAME, artifactPath);
}
s3.deleteBucket(BUCKET_NAME);
}
}
}
Use of jetbrains.buildServer.artifacts.ArtifactDataInstance in the project teamcity-s3-artifact-storage-plugin by JetBrains:
class S3CleanupExtensionIntegrationTest, method failsBecauseRetryDoesntHaveEnoughTime.
/**
 * Verifies that cleanup gives up after exhausting its retry budget: with 5 retries
 * configured (and nothing ever fixing the failure), exactly 6 delete attempts are made.
 */
@Test
public void failsBecauseRetryDoesntHaveEnoughTime() {
// This test intentionally produces error output; tell the test logger not to fail on it.
if (myTestLogger != null) {
myTestLogger.doNotFailOnErrorMessages();
}
AWSCredentialsProvider credentials = myLocalStack.getDefaultCredentialsProvider();
AwsClientBuilder.EndpointConfiguration endpoint = myLocalStack.getEndpointConfiguration(S3);
Map<String, String> settings = getStorageSettings(credentials, endpoint);
settings.put("teamcity.internal.storage.s3.upload.retryDelayMs", "200");
settings.put("teamcity.internal.storage.s3.upload.numberOfRetries", "5");
String path = "bar";
String contents = "baz";
ArtifactDataInstance artifact = ArtifactDataInstance.create(path, contents.length());
S3CleanupExtension extension = getCleanupExtension(settings, artifact);
// Count every recoverable S3 failure surfaced to the listener; nothing repairs the
// missing data, so every attempt fails and increments the counter.
AtomicInteger attemptCounter = new AtomicInteger(0);
extension.registerListener(new AbstractCleanupListener() {
@Override
public void onError(Exception exception, boolean isRecoverable) {
if (isRecoverable && exception instanceof AmazonS3Exception) {
attemptCounter.incrementAndGet();
}
}
});
Mock contextMock = getContextMock();
contextMock.stubs().method("onBuildCleanupError");
BuildCleanupContext cleanupContext = (BuildCleanupContext) contextMock.proxy();
extension.prepareBuildsData(cleanupContext);
extension.cleanupBuildsData(cleanupContext);
// Initial attempt + 5 retries.
assertEquals("Should try deleting object for 6 times", 6, attemptCounter.get());
}
Aggregations