Search in sources :

Example 16 with S3Configuration

use of jetbrains.buildServer.artifacts.s3.S3Configuration in project teamcity-s3-artifact-storage-plugin by JetBrains.

the class S3ArtifactsPublisherTest method testManyFilesAreCorrectlySendToConsumer.

/**
 * Verifies that a large number of files (500) reported by the uploader — possibly from
 * multiple threads, since the stub uploader feeds the consumer via a parallel stream —
 * all reach the published artifact list without any entry being dropped or null.
 *
 * @throws IOException if a temporary fixture file cannot be created
 */
public void testManyFilesAreCorrectlySendToConsumer() throws IOException {
    // Minimal mocked build environment: storage settings pointing at a fake S3 endpoint.
    AgentRunningBuild build = Mockito.mock(AgentRunningBuild.class);
    HashMap<String, String> storageSettings = new HashMap<>();
    storageSettings.put("aws.region.name", "test");
    storageSettings.put("secure:aws.secret.access.key", "test");
    storageSettings.put("aws.access.key.id", "test");
    storageSettings.put("aws.credentials.type", "aws.access.keys");
    storageSettings.put("storage.s3.bucket.name", "BUCKET_NAME");
    storageSettings.put("aws.environment", "custom");
    storageSettings.put("aws.service.endpoint", "http://localhost");
    when(build.getSharedConfigParameters()).thenReturn(Collections.emptyMap());
    when(build.getArtifactStorageSettings()).thenReturn(storageSettings);
    when(build.getBuildLogger()).thenReturn(Mockito.mock(BuildProgressLogger.class, RETURNS_DEEP_STUBS));
    CurrentBuildTracker tracker = Mockito.mock(CurrentBuildTracker.class);
    when(tracker.getCurrentBuild()).thenReturn(build);
    BuildAgentConfiguration config = Mockito.mock(BuildAgentConfiguration.class);
    when(config.getAgentHomeDirectory()).thenReturn(new File(""));
    PresignedUrlsProviderClientFactory clientFactory = Mockito.mock(PresignedUrlsProviderClientFactory.class);
    ExtensionHolder holder = Mockito.mock(ExtensionHolder.class);
    EventDispatcher<AgentLifeCycleListener> dispatcher = EventDispatcher.create(AgentLifeCycleListener.class);
    S3FileUploaderFactory uploaderFactory = Mockito.mock(S3FileUploaderFactory.class);
    final S3Configuration s3Configuration = Mockito.mock(S3Configuration.class);
    final S3UploadLogger s3UploadLogger = Mockito.mock(S3UploadLogger.class);
    // Fixtures: 500 real (empty) temp files plus the upload records the stub will report.
    HashMap<File, String> artifacts = new HashMap<>();
    List<FileUploadInfo> uploadInfos = new ArrayList<>();
    for (int i = 0; i < 500; i++) {
        final File testFile = Files.createTempFile("test", String.valueOf(i)).toFile();
        // FIX: register the fixture for JVM-exit cleanup; the original leaked
        // 500 temp files into the system temp directory on every run.
        testFile.deleteOnExit();
        artifacts.put(testFile, "");
        uploadInfos.add(new FileUploadInfo(testFile.getName(), testFile.getAbsolutePath(), testFile.length(), null));
    }
    // Stub uploader that bypasses S3 entirely and pushes all upload infos to the
    // consumer concurrently, exercising the consumer's thread-safety.
    S3FileUploader uploader = new S3FileUploader(s3Configuration, s3UploadLogger) {

        @Override
        public Collection<UploadStatistics> upload(@NotNull Map<File, String> filesToUpload, @NotNull Supplier<String> interrupter, Consumer<FileUploadInfo> uploadInfoConsumer) throws InvalidSettingsException {
            uploadInfos.stream().parallel().forEach(i -> uploadInfoConsumer.accept(i));
            return null;
        }
    };
    when(uploaderFactory.create(any(), any(), any())).thenReturn(uploader);
    // The assertion lives here: every published artifact entry must be non-null.
    AgentArtifactHelper helper = new AgentArtifactHelper() {

        @Override
        public void publishArtifactList(@NotNull List<ArtifactDataInstance> artifacts, @Nullable Map<String, String> commonProperties) {
            assertEmpty(artifacts.stream().filter(Objects::isNull).collect(Collectors.toList()), "Should not contain any nulls");
        }

        @Override
        public boolean isEnabled(@NotNull ArtifactsPublisher publisher, @NotNull String path) {
            return true;
        }
    };
    S3ArtifactsPublisher publisher = new S3ArtifactsPublisher(helper, dispatcher, tracker, config, clientFactory, uploaderFactory, holder);
    publisher.publishFiles(artifacts);
}
Also used : NotNull(org.jetbrains.annotations.NotNull) Consumer(java.util.function.Consumer) FileUploadInfo(jetbrains.buildServer.artifacts.s3.FileUploadInfo) Supplier(java.util.function.Supplier) AgentArtifactHelper(jetbrains.buildServer.agent.artifacts.AgentArtifactHelper) S3UploadLogger(jetbrains.buildServer.artifacts.s3.publish.logger.S3UploadLogger) PresignedUrlsProviderClientFactory(jetbrains.buildServer.artifacts.s3.publish.presigned.upload.PresignedUrlsProviderClientFactory) S3Configuration(jetbrains.buildServer.artifacts.s3.S3Configuration) ExtensionHolder(jetbrains.buildServer.ExtensionHolder) File(java.io.File) Nullable(org.jetbrains.annotations.Nullable)

Example 17 with S3Configuration

use of com.ibm.cohort.tooling.s3.S3Configuration in project quality-measure-and-cohort-service by Alvearie.

the class ValueSetImporter method runWithArgs.

/**
 * Entry point for the value-set importer CLI. Parses {@code args}, then for each
 * spreadsheet either imports the resulting ValueSet into a FHIR server, writes it to
 * S3 and/or the local filesystem, or both, depending on the configured output location.
 *
 * @param args raw command-line arguments
 * @param out  stream used for usage/help output
 * @throws IOException if a spreadsheet, config file, or output file cannot be read/written
 */
void runWithArgs(String[] args, PrintStream out) throws IOException {
    ValueSetImporterArguments arguments = new ValueSetImporterArguments();
    Console console = new DefaultConsole(out);
    JCommander jc = JCommander.newBuilder().programName("value-set-importer").console(console).addObject(arguments).build();
    jc.parse(args);
    if (arguments.isDisplayHelp) {
        jc.usage();
    } else {
        arguments.validate();
        FhirContext fhirContext = FhirContext.forR4();
        // only connect to fhir server if we are not writing it to file system
        IGenericClient client = null;
        ObjectMapper om = new ObjectMapper();
        if (arguments.fileOutputLocation == OutputLocations.NONE) {
            FhirServerConfig config = om.readValue(arguments.measureServerConfigFile, FhirServerConfig.class);
            client = FhirClientBuilderFactory.newInstance().newFhirClientBuilder(fhirContext).createFhirClient(config);
        }
        // Optional code-system mapping file (spreadsheet code system -> canonical URL).
        Map<String, String> codeSystemMappings = null;
        if (arguments.filename != null) {
            codeSystemMappings = ValueSetUtil.getMapFromInputStream(new FileInputStream(new File(arguments.filename)));
        }
        for (String arg : arguments.spreadsheets) {
            try (InputStream is = new FileInputStream(arg)) {
                ValueSetArtifact artifact = ValueSetUtil.createArtifact(is, codeSystemMappings);
                // only import the value set to fhir server if we are not writing the value set to file system
                if (arguments.fileOutputLocation == OutputLocations.NONE) {
                    String retVal = ValueSetUtil.importArtifact(client, artifact, arguments.overrideValueSets);
                    if (retVal == null) {
                        logger.error("Value set already exists! Please provide the override option if you would like to override this value set.");
                    }
                } else {
                    // write value set to file system
                    ValueSet vs = artifact.getFhirResource();
                    // If the valueset id contains urn:oid, remove it to make a valid filename
                    String valueSetId = vs.getId().startsWith("urn:oid:") ? vs.getId().replace("urn:oid:", "") : vs.getId();
                    String vsFileName = valueSetId + "." + arguments.filesystemOutputFormat.toString().toLowerCase();
                    if (arguments.fileOutputLocation == OutputLocations.BOTH || arguments.fileOutputLocation == OutputLocations.S3) {
                        S3Configuration S3Config = om.readValue(arguments.S3JsonConfigs, S3Configuration.class);
                        AmazonS3 S3Client = createClient(S3Config.getAccess_key_id(), S3Config.getSecret_access_key(), S3Config.getEndpoint(), S3Config.getLocation());
                        putToS3(arguments, fhirContext, vs, vsFileName, S3Client);
                    }
                    if (arguments.fileOutputLocation == OutputLocations.BOTH || arguments.fileOutputLocation == OutputLocations.LOCAL) {
                        // FIX: create the output directory BEFORE opening the writer. The
                        // FileWriter constructor throws FileNotFoundException when the parent
                        // directory is missing, so the original mkdir call placed inside the
                        // try-with-resources body could never execute. Also use mkdirs() so
                        // nested paths are created as well.
                        File outputDir = new File(arguments.fileSystemOutputPath);
                        if (!outputDir.exists()) {
                            outputDir.mkdirs();
                        }
                        try (BufferedWriter writer = new BufferedWriter(new FileWriter(arguments.fileSystemOutputPath + System.getProperty("file.separator") + vsFileName))) {
                            // write to xml or json format
                            if (arguments.filesystemOutputFormat == FileFormat.JSON) {
                                fhirContext.newJsonParser().encodeResourceToWriter(vs, writer);
                            } else if (arguments.filesystemOutputFormat == FileFormat.XML) {
                                fhirContext.newXmlParser().encodeResourceToWriter(vs, writer);
                            }
                        }
                    }
                }
            }
        }
    }
}
Also used : AmazonS3(com.amazonaws.services.s3.AmazonS3) FhirContext(ca.uhn.fhir.context.FhirContext) DefaultConsole(com.beust.jcommander.internal.DefaultConsole) IGenericClient(ca.uhn.fhir.rest.client.api.IGenericClient) ByteArrayInputStream(java.io.ByteArrayInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) FileWriter(java.io.FileWriter) ValueSetArtifact(com.ibm.cohort.valueset.ValueSetArtifact) FileInputStream(java.io.FileInputStream) BufferedWriter(java.io.BufferedWriter) S3Configuration(com.ibm.cohort.tooling.s3.S3Configuration) JCommander(com.beust.jcommander.JCommander) Console(com.beust.jcommander.internal.Console) DefaultConsole(com.beust.jcommander.internal.DefaultConsole) FhirServerConfig(com.ibm.cohort.fhir.client.config.FhirServerConfig) File(java.io.File) ValueSet(org.hl7.fhir.r4.model.ValueSet) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)

Example 18 with S3Configuration

use of software.amazon.awssdk.services.s3.S3Configuration in project ZarrReader by ome.

the class S3FileSystemStore method setupClient.

/**
 * Builds the anonymous, path-style S3 client used to read the store, deriving the
 * endpoint host from the second path segment of {@code root}
 * (e.g. "s3://host/bucket/..." -> "https://host/").
 */
private void setupClient() {
    // FIX: URLs always use '/' as the separator. The original split on
    // File.separator, which on Windows is '\' — an invalid single-character
    // regex (PatternSyntaxException, silently swallowed by the catch below)
    // and wrong for URLs in any case. Splitting on the literal "/" is
    // byte-identical behavior on Unix and fixes Windows.
    String[] pathSplit = root.toString().split("/");
    String endpoint = "https://" + pathSplit[1] + "/";
    URI endpoint_uri;
    try {
        endpoint_uri = new URI(endpoint);
        // Path-style access: bucket name goes in the path, not the hostname.
        final S3Configuration config = S3Configuration.builder().pathStyleAccessEnabled(true).build();
        // Public/anonymous access only — no credentials are resolved.
        AwsCredentials credentials = AnonymousCredentialsProvider.create().resolveCredentials();
        client = S3Client.builder().endpointOverride(endpoint_uri).serviceConfiguration(config).region(// Ignored but required by the client
        Region.EU_WEST_1).credentialsProvider(StaticCredentialsProvider.create(credentials)).build();
    } catch (URISyntaxException e) {
        e.printStackTrace();
    } catch (Exception e) {
        // Deliberately best-effort: client stays null on failure.
        // NOTE(review): consider logging instead of swallowing — confirm callers
        // tolerate a null client before changing this.
    }
}
Also used : S3Configuration(software.amazon.awssdk.services.s3.S3Configuration) AwsCredentials(software.amazon.awssdk.auth.credentials.AwsCredentials) URISyntaxException(java.net.URISyntaxException) URI(java.net.URI) URISyntaxException(java.net.URISyntaxException) IOException(java.io.IOException)

Example 19 with S3Configuration

use of org.apache.camel.component.aws.s3.S3Configuration in project syndesis by syndesisio.

the class AWSS3VerifierExtension method verifyConnectivity.

// *********************************
// Connectivity validation
// *********************************
/**
 * Validates that the supplied S3 connection parameters can actually reach AWS by
 * listing buckets with the configured credentials and region. Authentication-level
 * failures (SdkClientException) are reported with structured error details; any
 * other failure is wrapped generically. Always returns a built Result.
 */
@Override
protected Result verifyConnectivity(Map<String, Object> parameters) {
    final ResultBuilder resultBuilder = ResultBuilder.withStatusAndScope(Result.Status.OK, Scope.CONNECTIVITY);
    try {
        final S3Configuration s3Config = setProperties(new S3Configuration(), parameters);
        final AWSCredentialsProvider credentialsProvider =
            new AWSStaticCredentialsProvider(new BasicAWSCredentials(s3Config.getAccessKey(), s3Config.getSecretKey()));
        final AmazonS3 s3 = AmazonS3ClientBuilder.standard()
            .withCredentials(credentialsProvider)
            .withRegion(Regions.valueOf(s3Config.getRegion()))
            .build();
        // Cheap authenticated call to prove the credentials/region work.
        s3.listBuckets();
    } catch (SdkClientException e) {
        resultBuilder.error(
            ResultErrorBuilder.withCodeAndDescription(VerificationError.StandardCode.AUTHENTICATION, e.getMessage())
                .detail("aws_s3_exception_message", e.getMessage())
                .detail(VerificationError.ExceptionAttribute.EXCEPTION_CLASS, e.getClass().getName())
                .detail(VerificationError.ExceptionAttribute.EXCEPTION_INSTANCE, e)
                .build());
    } catch (Exception e) {
        // Boundary catch: any other failure becomes a generic verification error.
        resultBuilder.error(ResultErrorBuilder.withException(e).build());
    }
    return resultBuilder.build();
}
Also used : ResultBuilder(org.apache.camel.component.extension.verifier.ResultBuilder) AmazonS3(com.amazonaws.services.s3.AmazonS3) AWSStaticCredentialsProvider(com.amazonaws.auth.AWSStaticCredentialsProvider) S3Configuration(org.apache.camel.component.aws.s3.S3Configuration) SdkClientException(com.amazonaws.SdkClientException) ResultErrorBuilder(org.apache.camel.component.extension.verifier.ResultErrorBuilder) BasicAWSCredentials(com.amazonaws.auth.BasicAWSCredentials) AWSCredentials(com.amazonaws.auth.AWSCredentials) BasicAWSCredentials(com.amazonaws.auth.BasicAWSCredentials) AWSCredentialsProvider(com.amazonaws.auth.AWSCredentialsProvider) SdkClientException(com.amazonaws.SdkClientException)

Aggregations

S3Configuration (software.amazon.awssdk.services.s3.S3Configuration)12 URI (java.net.URI)5 S3Configuration (jetbrains.buildServer.artifacts.s3.S3Configuration)4 File (java.io.File)3 NotNull (org.jetbrains.annotations.NotNull)3 Test (org.junit.jupiter.api.Test)3 ConfiguredS3SdkHttpRequest (software.amazon.awssdk.services.s3.internal.ConfiguredS3SdkHttpRequest)3 AmazonS3 (com.amazonaws.services.s3.AmazonS3)2 Consumer (java.util.function.Consumer)2 Supplier (java.util.function.Supplier)2 ExtensionHolder (jetbrains.buildServer.ExtensionHolder)2 AgentArtifactHelper (jetbrains.buildServer.agent.artifacts.AgentArtifactHelper)2 FileUploadInfo (jetbrains.buildServer.artifacts.s3.FileUploadInfo)2 S3UploadLogger (jetbrains.buildServer.artifacts.s3.publish.logger.S3UploadLogger)2 PresignedUrlsProviderClientFactory (jetbrains.buildServer.artifacts.s3.publish.presigned.upload.PresignedUrlsProviderClientFactory)2 S3Configuration (org.apache.camel.component.aws.s3.S3Configuration)2 SdkHttpRequest (software.amazon.awssdk.http.SdkHttpRequest)2 Region (software.amazon.awssdk.regions.Region)2 S3Request (software.amazon.awssdk.services.s3.model.S3Request)2 Http (akka.http.scaladsl.Http)1