Search in sources :

Example 31 with AmazonS3Client

use of com.amazonaws.services.s3.AmazonS3Client in project bender by Nextdoor.

the class S3TransporterTest method testCompressedPartitoned.

@Test
public void testCompressedPartitoned() throws TransportException, IllegalStateException, IOException {
    /*
     * Set up a mocked S3 client and a buffer holding one serialized event.
     */
    AmazonS3Client s3Mock = getMockClient();
    S3TransportBuffer transportBuffer = new S3TransportBuffer(1000, true, new S3TransportSerializer());
    InternalEvent event = mock(InternalEvent.class);
    doReturn("foo").when(event).getSerialized();
    /*
     * Build the transport under test (compression enabled).
     */
    Map<String, MultiPartUpload> uploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(s3Mock, "bucket", "basepath", true, uploads);
    /*
     * Send a batch with filename/day/hour partitions and capture the resulting
     * multipart upload request.
     */
    transportBuffer.add(event);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    partitions.put(S3Transport.FILENAME_KEY, "a_filename");
    partitions.put("day", "01");
    partitions.put("hour", "23");
    ArgumentCaptor<UploadPartRequest> captor = ArgumentCaptor.forClass(UploadPartRequest.class);
    transport.sendBatch(transportBuffer, partitions, new TestContext());
    verify(s3Mock).uploadPart(captor.capture());
    /*
     * The key should embed the partitions as path segments and carry a .bz2
     * extension; part metadata should match the mocked upload.
     */
    UploadPartRequest request = captor.getValue();
    assertEquals("bucket", request.getBucketName());
    assertEquals("basepath/day=01/hour=23/a_filename.bz2", request.getKey());
    assertEquals(1, request.getPartNumber());
    assertEquals(3, request.getPartSize());
    assertEquals("123", request.getUploadId());
}
Also used : HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) TestContext(com.nextdoor.bender.aws.TestContext) UploadPartRequest(com.amazonaws.services.s3.model.UploadPartRequest) InternalEvent(com.nextdoor.bender.InternalEvent) LinkedHashMap(java.util.LinkedHashMap) AmazonS3Client(com.amazonaws.services.s3.AmazonS3Client) Test(org.junit.Test)

Example 32 with AmazonS3Client

use of com.amazonaws.services.s3.AmazonS3Client in project bender by Nextdoor.

the class S3TransporterTest method testContextBasedFilename.

@Test
public void testContextBasedFilename() throws TransportException, IllegalStateException, IOException {
    /*
     * Mock the S3 client and buffer a single serialized event.
     */
    AmazonS3Client s3Mock = getMockClient();
    S3TransportBuffer transportBuffer = new S3TransportBuffer(1000, true, new S3TransportSerializer());
    InternalEvent event = mock(InternalEvent.class);
    doReturn("foo").when(event).getSerialized();
    /*
     * Transport configured with a trailing-slash base path and compression on.
     */
    Map<String, MultiPartUpload> uploads = new HashMap<String, MultiPartUpload>(0);
    S3Transport transport = new S3Transport(s3Mock, "bucket", "basepath/", true, uploads);
    /*
     * Send a batch with no partitions but a context carrying an AWS request id,
     * and capture the resulting upload request.
     */
    transportBuffer.add(event);
    LinkedHashMap<String, String> partitions = new LinkedHashMap<String, String>();
    ArgumentCaptor<UploadPartRequest> captor = ArgumentCaptor.forClass(UploadPartRequest.class);
    TestContext context = new TestContext();
    context.setAwsRequestId("request_id");
    transport.sendBatch(transportBuffer, partitions, context);
    verify(s3Mock).uploadPart(captor.capture());
    /*
     * With no FILENAME_KEY partition the key is built from the request id.
     */
    assertEquals("basepath/request_id.bz2", captor.getValue().getKey());
}
Also used : HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) TestContext(com.nextdoor.bender.aws.TestContext) UploadPartRequest(com.amazonaws.services.s3.model.UploadPartRequest) InternalEvent(com.nextdoor.bender.InternalEvent) LinkedHashMap(java.util.LinkedHashMap) AmazonS3Client(com.amazonaws.services.s3.AmazonS3Client) Test(org.junit.Test)

Example 33 with AmazonS3Client

use of com.amazonaws.services.s3.AmazonS3Client in project bender by Nextdoor.

the class S3SnsNotifier method main.

/**
 * Reads S3 object keys from a local file and publishes a synthetic S3 event
 * notification for each key to an SNS topic. Optionally records processed
 * keys to a local file and, with --skip-processed, skips keys already
 * recorded on a previous run.
 *
 * Required options: --bucket, --key-file, --sns-arn.
 * Optional: --throttle-ms, --processed-file, --skip-processed, --dry-run.
 */
public static void main(String[] args) throws ParseException, InterruptedException, IOException {
    formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();
    /*
     * Parse cli arguments
     */
    Options options = new Options();
    options.addOption(Option.builder().longOpt("bucket").hasArg().required().desc("Name of S3 bucket to list s3 objects from").build());
    options.addOption(Option.builder().longOpt("key-file").hasArg().required().desc("Local file of S3 keys to process").build());
    options.addOption(Option.builder().longOpt("sns-arn").hasArg().required().desc("SNS arn to publish to").build());
    options.addOption(Option.builder().longOpt("throttle-ms").hasArg().desc("Amount of ms to wait between publishing to SNS").build());
    options.addOption(Option.builder().longOpt("processed-file").hasArg().desc("Local file to use to store procssed S3 object names").build());
    options.addOption(Option.builder().longOpt("skip-processed").hasArg(false).desc("Whether to skip S3 objects that have been processed").build());
    options.addOption(Option.builder().longOpt("dry-run").hasArg(false).desc("If set do not publish to SNS").build());
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);
    String bucket = cmd.getOptionValue("bucket");
    String keyFile = cmd.getOptionValue("key-file");
    String snsArn = cmd.getOptionValue("sns-arn");
    String processedFile = cmd.getOptionValue("processed-file", null);
    boolean skipProcessed = cmd.hasOption("skip-processed");
    dryRun = cmd.hasOption("dry-run");
    long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));
    // Ensure the processed-keys file exists before we try to read or append to it.
    if (processedFile != null) {
        File file = new File(processedFile);
        if (!file.exists()) {
            logger.debug("creating local file to store processed s3 object names: " + processedFile);
            file.createNewFile();
        }
    }
    /*
     * Import S3 keys that have been processed
     */
    if (skipProcessed && processedFile != null) {
        try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                alreadyPublished.add(line.trim());
            }
        }
    }
    /*
     * Setup writer for file containing processed S3 keys. Closing the
     * BufferedWriter also closes the underlying FileWriter.
     */
    BufferedWriter bw = null;
    try {
        if (processedFile != null) {
            bw = new BufferedWriter(new FileWriter(processedFile, true));
        }
        /*
         * Create clients
         */
        AmazonS3Client s3Client = new AmazonS3Client();
        AmazonSNSClient snsClient = new AmazonSNSClient();
        /*
         * Get S3 object list
         */
        try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                String key = line.trim();
                if (alreadyPublished.contains(key)) {
                    logger.info("skipping " + key);
                    // BUGFIX: previously fell through and re-published keys that
                    // were already processed; move on to the next key instead.
                    continue;
                }
                ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);
                S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());
                String json = s3Notification.toJson();
                /*
                 * Publish to SNS; record the key only if publishing succeeded.
                 */
                if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                    bw.write(key + "\n");
                    bw.flush();
                }
                // Optional rate limiting between publishes.
                if (throttle != -1) {
                    Thread.sleep(throttle);
                }
            }
        }
    } finally {
        // BUGFIX: the writer is now closed even if an exception escapes the
        // loop; previously it leaked on any failure path.
        if (bw != null) {
            bw.close();
        }
    }
}
Also used : Options(org.apache.commons.cli.Options) FileWriter(java.io.FileWriter) AmazonSNSClient(com.amazonaws.services.sns.AmazonSNSClient) BufferedWriter(java.io.BufferedWriter) S3EventNotification(com.amazonaws.services.s3.event.S3EventNotification) CommandLine(org.apache.commons.cli.CommandLine) AmazonS3Client(com.amazonaws.services.s3.AmazonS3Client) BufferedReader(java.io.BufferedReader) FileReader(java.io.FileReader) CommandLineParser(org.apache.commons.cli.CommandLineParser) File(java.io.File) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata) DefaultParser(org.apache.commons.cli.DefaultParser)

Example 34 with AmazonS3Client

use of com.amazonaws.services.s3.AmazonS3Client in project Synapse-Stack-Builder by Sage-Bionetworks.

the class BuildStackMainTest method before.

/*
 * Test fixture: wires up mocked AWS clients (S3, EC2, SNS, RDS) from the mock
 * factory so the test can run against canned responses instead of real AWS.
 * NOTE(review): assumes the code under test obtains its clients from the same
 * MockAmazonClientFactory instance -- confirm against BuildStackMain.
 */
@Before
public void before() throws IOException {
    // Load the "dev" stack's input and default properties used by the stubs below.
    inputProps = TestHelper.createInputProperties("dev");
    InputConfiguration config = TestHelper.createTestConfig("dev");
    defaultProps = TestHelper.createDefaultProperties();
    clientFactory = new MockAmazonClientFactory();
    AmazonS3Client mockS3Client = clientFactory.createS3Client();
    AmazonEC2Client mockEC2Client = clientFactory.createEC2Client();
    AmazonSNSClient mockSNSnsClient = clientFactory.createSNSClient();
    AmazonRDSClient mockRdsClient = clientFactory.createRDSClient();
    // Write the default properties.
    // Stub getObject(request, file) to write defaultProps into the destination
    // file, mimicking a properties download from S3.
    when(mockS3Client.getObject(any(GetObjectRequest.class), any(File.class))).thenAnswer(new Answer<ObjectMetadata>() {

        public ObjectMetadata answer(InvocationOnMock invocation) throws Throwable {
            // Write the property file
            File file = (File) invocation.getArguments()[1];
            FileWriter writer = new FileWriter(file);
            try {
                defaultProps.store(writer, "test generated");
            } finally {
                writer.close();
            }
            return new ObjectMetadata();
        }
    });
    // Return a valid EC2 security group.
    DescribeSecurityGroupsRequest dsgr = new DescribeSecurityGroupsRequest().withGroupNames(config.getElasticSecurityGroupName());
    when(mockEC2Client.describeSecurityGroups(dsgr)).thenReturn(new DescribeSecurityGroupsResult().withSecurityGroups(new SecurityGroup().withGroupName(config.getElasticSecurityGroupName())));
    // Return a valid topic
    String topicArn = "some:arn";
    when(mockSNSnsClient.createTopic(new CreateTopicRequest(config.getRDSAlertTopicName()))).thenReturn(new CreateTopicResult().withTopicArn(topicArn));
    when(mockSNSnsClient.listSubscriptionsByTopic(new ListSubscriptionsByTopicRequest(topicArn))).thenReturn(new ListSubscriptionsByTopicResult().withSubscriptions(new Subscription()));
    // return a valid group
    // Stub the RDS parameter group lookups; parameters include the slow-query
    // and long-query-time keys the code under test inspects.
    when(mockRdsClient.describeDBParameterGroups(new DescribeDBParameterGroupsRequest().withDBParameterGroupName(config.getDatabaseParameterGroupName()))).thenReturn(new DescribeDBParameterGroupsResult().withDBParameterGroups(new DBParameterGroup().withDBParameterGroupName(config.getDatabaseParameterGroupName())));
    when(mockRdsClient.describeDBParameters(new DescribeDBParametersRequest().withDBParameterGroupName(config.getDatabaseParameterGroupName()))).thenReturn(new DescribeDBParametersResult().withParameters(new Parameter().withParameterName(Constants.DB_PARAM_KEY_SLOW_QUERY_LOG)).withParameters(new Parameter().withParameterName(Constants.DB_PARAM_KEY_LONG_QUERY_TIME)));
}
Also used : DescribeSecurityGroupsRequest(com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest) DescribeDBParameterGroupsResult(com.amazonaws.services.rds.model.DescribeDBParameterGroupsResult) FileWriter(java.io.FileWriter) AmazonSNSClient(com.amazonaws.services.sns.AmazonSNSClient) CreateTopicResult(com.amazonaws.services.sns.model.CreateTopicResult) DescribeDBParametersRequest(com.amazonaws.services.rds.model.DescribeDBParametersRequest) DescribeDBParameterGroupsRequest(com.amazonaws.services.rds.model.DescribeDBParameterGroupsRequest) DescribeSecurityGroupsResult(com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult) Subscription(com.amazonaws.services.sns.model.Subscription) GetObjectRequest(com.amazonaws.services.s3.model.GetObjectRequest) AmazonEC2Client(com.amazonaws.services.ec2.AmazonEC2Client) DBParameterGroup(com.amazonaws.services.rds.model.DBParameterGroup) ListSubscriptionsByTopicRequest(com.amazonaws.services.sns.model.ListSubscriptionsByTopicRequest) DescribeDBParametersResult(com.amazonaws.services.rds.model.DescribeDBParametersResult) CreateTopicRequest(com.amazonaws.services.sns.model.CreateTopicRequest) MockAmazonClientFactory(org.sagebionetworks.factory.MockAmazonClientFactory) AmazonRDSClient(com.amazonaws.services.rds.AmazonRDSClient) SecurityGroup(com.amazonaws.services.ec2.model.SecurityGroup) InputConfiguration(org.sagebionetworks.stack.config.InputConfiguration) AmazonS3Client(com.amazonaws.services.s3.AmazonS3Client) InvocationOnMock(org.mockito.invocation.InvocationOnMock) ListSubscriptionsByTopicResult(com.amazonaws.services.sns.model.ListSubscriptionsByTopicResult) Parameter(com.amazonaws.services.rds.model.Parameter) File(java.io.File) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata) Before(org.junit.Before)

Example 35 with AmazonS3Client

use of com.amazonaws.services.s3.AmazonS3Client in project opentest by mcdcorp.

the class GetS3Object method run.

/**
 * Downloads an S3 object (using the named AWS credentials profile) and copies
 * its content into a local file. Relative target paths are resolved against
 * the actor's temp directory; parent directories are created as needed. The
 * absolute path of the written file is exposed via the "targetFile" output.
 */
@Override
public void run() {
    super.run();
    String awsCredentialsProfile = this.readStringArgument("awsProfile", "default");
    String bucket = this.readStringArgument("bucket");
    String objectKey = this.readStringArgument("objectKey");
    String targetFilePath = this.readStringArgument("targetFile");
    Boolean overwrite = this.readBooleanArgument("overwrite", false);
    AmazonS3 s3Client = new AmazonS3Client(new ProfileCredentialsProvider(awsCredentialsProfile));
    S3Object object = s3Client.getObject(new GetObjectRequest(bucket, objectKey));
    InputStream objectDataStream = object.getObjectContent();
    try {
        if (targetFilePath != null) {
            File targetFile = new File(targetFilePath);
            if (!targetFile.isAbsolute()) {
                // Relative paths land in the actor's temp directory.
                targetFile = Paths.get(this.getActor().getTempDir().getAbsolutePath(), targetFilePath).toFile();
            }
            targetFile.getParentFile().mkdirs();
            try {
                if (overwrite) {
                    Files.copy(objectDataStream, targetFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                } else {
                    Files.copy(objectDataStream, targetFile.toPath());
                }
            } catch (Exception ex) {
                throw new RuntimeException(String.format("Failed to transfer data from the input stream into file %s", targetFilePath), ex);
            }
            this.writeArgument("targetFile", targetFile.getAbsolutePath());
        } else {
        // TODO: Make targetFile arg optional so this branch can execute.
        // Read data in memory and write it to an output value
        }
    } finally {
        // BUGFIX: the S3 object content stream was never closed, leaking the
        // underlying HTTP connection (including on the targetFile==null branch).
        try {
            objectDataStream.close();
        } catch (Exception ignored) {
            // best-effort close; copy failures were already surfaced above
        }
    }
}
Also used : AmazonS3(com.amazonaws.services.s3.AmazonS3) AmazonS3Client(com.amazonaws.services.s3.AmazonS3Client) InputStream(java.io.InputStream) ProfileCredentialsProvider(com.amazonaws.auth.profile.ProfileCredentialsProvider) S3Object(com.amazonaws.services.s3.model.S3Object) GetObjectRequest(com.amazonaws.services.s3.model.GetObjectRequest) File(java.io.File)

Aggregations

AmazonS3Client (com.amazonaws.services.s3.AmazonS3Client)107 Test (org.junit.Test)23 BasicAWSCredentials (com.amazonaws.auth.BasicAWSCredentials)20 AmazonClientException (com.amazonaws.AmazonClientException)16 ClientConfiguration (com.amazonaws.ClientConfiguration)15 ArrayList (java.util.ArrayList)13 HashMap (java.util.HashMap)13 AmazonS3 (com.amazonaws.services.s3.AmazonS3)12 File (java.io.File)12 InvocationOnMock (org.mockito.invocation.InvocationOnMock)12 PutObjectResult (com.amazonaws.services.s3.model.PutObjectResult)11 UploadPartRequest (com.amazonaws.services.s3.model.UploadPartRequest)11 AWSCredentials (com.amazonaws.auth.AWSCredentials)10 AWSCredentialsProvider (com.amazonaws.auth.AWSCredentialsProvider)10 ObjectMetadata (com.amazonaws.services.s3.model.ObjectMetadata)10 AmazonServiceException (com.amazonaws.AmazonServiceException)9 AmazonS3Exception (com.amazonaws.services.s3.model.AmazonS3Exception)9 InternalEvent (com.nextdoor.bender.InternalEvent)9 TestContext (com.nextdoor.bender.aws.TestContext)9 IOException (java.io.IOException)9