use of com.ibm.cohort.tooling.s3.S3Configuration in project teamcity-s3-artifact-storage-plugin by JetBrains.
the class S3ArtifactsPublisherTest method testManyFilesAreCorrectlySendToConsumer.
public void testManyFilesAreCorrectlySendToConsumer() throws IOException {
  // Stub a running build whose artifact storage settings point at a custom S3 endpoint.
  AgentRunningBuild build = Mockito.mock(AgentRunningBuild.class);
  HashMap<String, String> storageSettings = new HashMap<>();
  storageSettings.put("aws.region.name", "test");
  storageSettings.put("secure:aws.secret.access.key", "test");
  storageSettings.put("aws.access.key.id", "test");
  storageSettings.put("aws.credentials.type", "aws.access.keys");
  storageSettings.put("storage.s3.bucket.name", "BUCKET_NAME");
  storageSettings.put("aws.environment", "custom");
  storageSettings.put("aws.service.endpoint", "http://localhost");
  when(build.getSharedConfigParameters()).thenReturn(Collections.emptyMap());
  when(build.getArtifactStorageSettings()).thenReturn(storageSettings);
  when(build.getBuildLogger()).thenReturn(Mockito.mock(BuildProgressLogger.class, RETURNS_DEEP_STUBS));
  CurrentBuildTracker tracker = Mockito.mock(CurrentBuildTracker.class);
  when(tracker.getCurrentBuild()).thenReturn(build);
  BuildAgentConfiguration config = Mockito.mock(BuildAgentConfiguration.class);
  when(config.getAgentHomeDirectory()).thenReturn(new File(""));
  PresignedUrlsProviderClientFactory clientFactory = Mockito.mock(PresignedUrlsProviderClientFactory.class);
  ExtensionHolder holder = Mockito.mock(ExtensionHolder.class);
  EventDispatcher<AgentLifeCycleListener> dispatcher = EventDispatcher.create(AgentLifeCycleListener.class);
  S3FileUploaderFactory uploaderFactory = Mockito.mock(S3FileUploaderFactory.class);
  final S3Configuration s3Configuration = Mockito.mock(S3Configuration.class);
  final S3UploadLogger s3UploadLogger = Mockito.mock(S3UploadLogger.class);
  // Prepare 500 temporary files plus the upload info records the stubbed uploader will emit.
  HashMap<File, String> artifacts = new HashMap<>();
  List<FileUploadInfo> uploadInfos = new ArrayList<>();
  for (int i = 0; i < 500; i++) {
    final File testFile = Files.createTempFile("test", String.valueOf(i)).toFile();
    artifacts.put(testFile, "");
    uploadInfos.add(new FileUploadInfo(testFile.getName(), testFile.getAbsolutePath(), testFile.length(), null));
  }
  // Stub uploader that feeds every upload info to the consumer from multiple threads at once.
  S3FileUploader uploader = new S3FileUploader(s3Configuration, s3UploadLogger) {
    @Override
    public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload,
                                               @NotNull Supplier<String> interrupter,
                                               Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
      uploadInfos.stream().parallel().forEach(uploadInfoConsumer::accept);
      return null;
    }
  };
  when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader);
  // The helper asserts that every published artifact entry was populated, i.e. no nulls slipped in.
  AgentArtifactHelper helper = new AgentArtifactHelper() {
    @Override
    public void publishArtifactList(@NotNull List<ArtifactDataInstance> artifacts, @Nullable Map<String, String> commonProperties) {
      assertEmpty(artifacts.stream().filter(Objects::isNull).collect(Collectors.toList()), "Should not contain any nulls");
    }

    @Override
    public boolean isEnabled(@NotNull ArtifactsPublisher publisher, @NotNull String path) {
      return true;
    }
  };
  S3ArtifactsPublisher publisher = new S3ArtifactsPublisher(helper, dispatcher, tracker, config, clientFactory, uploaderFactory, holder);
  publisher.publishFiles(artifacts);
}
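Note that the stubbed uploader pushes its FileUploadInfo records through a parallel stream, so whatever consumer S3ArtifactsPublisher hands in has to tolerate concurrent accept calls. A minimal sketch of such a consumer, using hypothetical stand-in types rather than the plugin's real FileUploadInfo and ArtifactDataInstance, could look like this:

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;

// Hypothetical stand-ins for the plugin's upload-info and artifact-entry types.
record UploadInfo(String artifactPath, long size) {}
record ArtifactEntry(String path, long size) {}

class ConcurrentArtifactCollector implements Consumer<UploadInfo> {
  // Thread-safe list: accept() may be invoked from many uploader threads at once.
  private final List<ArtifactEntry> entries = new CopyOnWriteArrayList<>();

  @Override
  public void accept(UploadInfo info) {
    entries.add(new ArtifactEntry(info.artifactPath(), info.size()));
  }

  List<ArtifactEntry> entries() {
    return entries;
  }
}

CopyOnWriteArrayList keeps the sketch short; a synchronized list or concurrent queue would serve equally well for write-heavy collection.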
use of com.ibm.cohort.tooling.s3.S3Configuration in project quality-measure-and-cohort-service by Alvearie.
the class ValueSetImporter method runWithArgs.
void runWithArgs(String[] args, PrintStream out) throws IOException {
  ValueSetImporterArguments arguments = new ValueSetImporterArguments();
  Console console = new DefaultConsole(out);
  JCommander jc = JCommander.newBuilder().programName("value-set-importer").console(console).addObject(arguments).build();
  jc.parse(args);
  if (arguments.isDisplayHelp) {
    jc.usage();
  } else {
    arguments.validate();
    FhirContext fhirContext = FhirContext.forR4();
    // only connect to the FHIR server if we are not writing to the file system
    IGenericClient client = null;
    ObjectMapper om = new ObjectMapper();
    if (arguments.fileOutputLocation == OutputLocations.NONE) {
      FhirServerConfig config = om.readValue(arguments.measureServerConfigFile, FhirServerConfig.class);
      client = FhirClientBuilderFactory.newInstance().newFhirClientBuilder(fhirContext).createFhirClient(config);
    }
    Map<String, String> codeSystemMappings = null;
    if (arguments.filename != null) {
      codeSystemMappings = ValueSetUtil.getMapFromInputStream(new FileInputStream(new File(arguments.filename)));
    }
    for (String arg : arguments.spreadsheets) {
      try (InputStream is = new FileInputStream(arg)) {
        ValueSetArtifact artifact = ValueSetUtil.createArtifact(is, codeSystemMappings);
        // only import the value set to the FHIR server if we are not writing it to the file system
        if (arguments.fileOutputLocation == OutputLocations.NONE) {
          String retVal = ValueSetUtil.importArtifact(client, artifact, arguments.overrideValueSets);
          if (retVal == null) {
            logger.error("Value set already exists! Please provide the override option if you would like to override this value set.");
          }
        } else {
          // write the value set to the file system
          ValueSet vs = artifact.getFhirResource();
          // if the value set id starts with urn:oid, strip the prefix to make a valid filename
          String valueSetId = vs.getId().startsWith("urn:oid:") ? vs.getId().replace("urn:oid:", "") : vs.getId();
          String vsFileName = valueSetId + "." + arguments.filesystemOutputFormat.toString().toLowerCase();
          if (arguments.fileOutputLocation == OutputLocations.BOTH || arguments.fileOutputLocation == OutputLocations.S3) {
            S3Configuration S3Config = om.readValue(arguments.S3JsonConfigs, S3Configuration.class);
            AmazonS3 S3Client = createClient(S3Config.getAccess_key_id(), S3Config.getSecret_access_key(), S3Config.getEndpoint(), S3Config.getLocation());
            putToS3(arguments, fhirContext, vs, vsFileName, S3Client);
          }
          if (arguments.fileOutputLocation == OutputLocations.BOTH || arguments.fileOutputLocation == OutputLocations.LOCAL) {
            try (BufferedWriter writer = new BufferedWriter(new FileWriter(arguments.fileSystemOutputPath + System.getProperty("file.separator") + vsFileName))) {
              // create the output dir if it doesn't exist
              File outputDir = new File(arguments.fileSystemOutputPath);
              if (!outputDir.exists()) {
                outputDir.mkdir();
              }
              // write to XML or JSON format
              if (arguments.filesystemOutputFormat == FileFormat.JSON) {
                fhirContext.newJsonParser().encodeResourceToWriter(vs, writer);
              } else if (arguments.filesystemOutputFormat == FileFormat.XML) {
                fhirContext.newXmlParser().encodeResourceToWriter(vs, writer);
              }
            }
          }
        }
      }
    }
  }
}
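The createClient helper itself is not shown on this page. Assuming it simply wires the four values read from the S3Configuration JSON into the AWS SDK v1 builder (the same builder used in the syndesis example below), a sketch could look like the following; this is an illustration under that assumption, not the project's actual implementation:

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

// Hypothetical sketch: builds a client against a custom endpoint (e.g. an object-storage service),
// matching the createClient(accessKeyId, secretAccessKey, endpoint, location) call above.
static AmazonS3 createClient(String accessKeyId, String secretAccessKey, String endpoint, String location) {
  BasicAWSCredentials credentials = new BasicAWSCredentials(accessKeyId, secretAccessKey);
  return AmazonS3ClientBuilder.standard()
      .withCredentials(new AWSStaticCredentialsProvider(credentials))
      // custom endpoint plus signing region ("location") instead of withRegion(...)
      .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, location))
      .build();
}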
use of com.ibm.cohort.tooling.s3.S3Configuration in project ZarrReader by ome.
the class S3FileSystemStore method setupClient.
private void setupClient() {
  String[] pathSplit = root.toString().split(File.separator);
  String endpoint = "https://" + pathSplit[1] + File.separator;
  URI endpoint_uri;
  try {
    endpoint_uri = new URI(endpoint);
    final S3Configuration config = S3Configuration.builder().pathStyleAccessEnabled(true).build();
    AwsCredentials credentials = AnonymousCredentialsProvider.create().resolveCredentials();
    client = S3Client.builder()
        .endpointOverride(endpoint_uri)
        .serviceConfiguration(config)
        .region(Region.EU_WEST_1) // Ignored but required by the client
        .credentialsProvider(StaticCredentialsProvider.create(credentials))
        .build();
  } catch (URISyntaxException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  } catch (Exception e) {
    // TODO Auto-generated catch block
    // e.printStackTrace();
  }
}
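With the anonymous, path-style client in place, reads against the public bucket go through the ordinary SDK v2 calls. A minimal sketch, with a made-up bucket and key purely for illustration:

import software.amazon.awssdk.core.ResponseBytes;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.GetObjectResponse;

// Sketch only: fetch one object from the public bucket with the anonymous client built above.
static byte[] readKey(S3Client client, String bucket, String key) {
  GetObjectRequest request = GetObjectRequest.builder()
      .bucket(bucket)   // hypothetical bucket name, e.g. the first path element after the endpoint
      .key(key)         // hypothetical object key, e.g. "image.zarr/.zattrs"
      .build();
  ResponseBytes<GetObjectResponse> bytes = client.getObjectAsBytes(request);
  return bytes.asByteArray();
}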
use of com.ibm.cohort.tooling.s3.S3Configuration in project syndesis by syndesisio.
the class AWSS3VerifierExtension method verifyConnectivity.
// *********************************
// Connectivity validation
// *********************************
@Override
protected Result verifyConnectivity(Map<String, Object> parameters) {
  ResultBuilder builder = ResultBuilder.withStatusAndScope(Result.Status.OK, Scope.CONNECTIVITY);
  try {
    S3Configuration configuration = setProperties(new S3Configuration(), parameters);
    AWSCredentials credentials = new BasicAWSCredentials(configuration.getAccessKey(), configuration.getSecretKey());
    AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(credentials);
    AmazonS3 client = AmazonS3ClientBuilder.standard()
        .withCredentials(credentialsProvider)
        .withRegion(Regions.valueOf(configuration.getRegion()))
        .build();
    client.listBuckets();
  } catch (SdkClientException e) {
    ResultErrorBuilder errorBuilder = ResultErrorBuilder
        .withCodeAndDescription(VerificationError.StandardCode.AUTHENTICATION, e.getMessage())
        .detail("aws_s3_exception_message", e.getMessage())
        .detail(VerificationError.ExceptionAttribute.EXCEPTION_CLASS, e.getClass().getName())
        .detail(VerificationError.ExceptionAttribute.EXCEPTION_INSTANCE, e);
    builder.error(errorBuilder.build());
  } catch (Exception e) {
    builder.error(ResultErrorBuilder.withException(e).build());
  }
  return builder.build();
}
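A caller would normally reach this check through the extension's public verify method rather than calling verifyConnectivity directly. The sketch below assumes the usual camel-aws-s3 parameter names (accessKey, secretKey, region); note that the region value must be a Regions enum constant name such as US_EAST_1, because the code above passes it straight to Regions.valueOf:

import java.util.HashMap;
import java.util.Map;
import org.apache.camel.component.extension.ComponentVerifierExtension;

// Sketch only: how a caller might exercise the connectivity check. The parameter keys mirror the
// camel-aws-s3 S3Configuration properties and are an assumption, not taken from this page.
static void checkConnectivity(AWSS3VerifierExtension verifier) {
  Map<String, Object> parameters = new HashMap<>();
  parameters.put("accessKey", "AKIA...");  // hypothetical credentials
  parameters.put("secretKey", "secret");
  parameters.put("region", "US_EAST_1");   // must match a Regions enum constant name
  ComponentVerifierExtension.Result result = verifier.verify(ComponentVerifierExtension.Scope.CONNECTIVITY, parameters);
  if (result.getStatus() != ComponentVerifierExtension.Result.Status.OK) {
    result.getErrors().forEach(error -> System.out.println(error.getDescription()));
  }
}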