
Example 1 with AmazonElasticMapReduce

Use of com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce in the project aws-doc-sdk-examples by awsdocs.

From the class Main, method main:

public static void main(String[] args) {
    AWSCredentials credentials_profile = null;
    try {
        credentials_profile = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load credentials from .aws/credentials file. "
                + "Make sure that the credentials file exists and the profile name is specified within it.", e);
    }

    AmazonElasticMapReduce emr = AmazonElasticMapReduceClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials_profile))
            .withRegion(Regions.US_WEST_1)
            .build();

    // Run a bash script using a predefined step in the StepFactory helper class
    StepFactory stepFactory = new StepFactory();
    StepConfig runBashScript = new StepConfig()
            .withName("Run a bash script")
            .withHadoopJarStep(stepFactory.newScriptRunnerStep("s3://jeffgoll/emr-scripts/create_users.sh"))
            .withActionOnFailure("CONTINUE");

    // Run a custom jar file as a step
    HadoopJarStepConfig hadoopConfig1 = new HadoopJarStepConfig()
            // replace with the location of the jar to run as a step
            .withJar("s3://path/to/my/jarfolder")
            // optional main class, this can be omitted if jar above has a manifest
            .withMainClass("com.my.Main1")
            // optional list of arguments to pass to the jar
            .withArgs("--verbose");
    StepConfig myCustomJarStep = new StepConfig("RunHadoopJar", hadoopConfig1);

    AddJobFlowStepsResult result = emr.addJobFlowSteps(new AddJobFlowStepsRequest()
            // replace with the ID of the cluster to run the steps on
            .withJobFlowId("j-xxxxxxxxxxxx")
            .withSteps(runBashScript, myCustomJarStep));

    System.out.println(result.getStepIds());
}
Also used: AWSStaticCredentialsProvider (com.amazonaws.auth.AWSStaticCredentialsProvider), AmazonClientException (com.amazonaws.AmazonClientException), ProfileCredentialsProvider (com.amazonaws.auth.profile.ProfileCredentialsProvider), StepFactory (com.amazonaws.services.elasticmapreduce.util.StepFactory), AWSCredentials (com.amazonaws.auth.AWSCredentials), AmazonElasticMapReduce (com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce)
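A possible follow-up, not part of the original example: a minimal sketch that polls the status of the newly added steps with the SDK's DescribeStep call (DescribeStepRequest and DescribeStepResult, both from com.amazonaws.services.elasticmapreduce.model), reusing the same "j-xxxxxxxxxxxx" placeholder cluster ID.

// Sketch only: check each returned step ID once; a real caller would poll until
// the state leaves PENDING/RUNNING. Assumes the same placeholder cluster ID as above.
for (String stepId : result.getStepIds()) {
    DescribeStepResult stepStatus = emr.describeStep(new DescribeStepRequest()
            .withClusterId("j-xxxxxxxxxxxx")
            .withStepId(stepId));
    System.out.println("Step " + stepId + " is " + stepStatus.getStep().getStatus().getState());
}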

Example 2 with AmazonElasticMapReduce

Use of com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce in the project aws-doc-sdk-examples by awsdocs.

From the class Main_test, method main:

public static void main(String[] args) {
    AWSCredentials credentials_profile = null;
    try {
        credentials_profile = new ProfileCredentialsProvider("default").getCredentials();
    } catch (Exception e) {
        throw new AmazonClientException("Cannot load credentials from .aws/credentials file. "
                + "Make sure that the credentials file exists and the profile name is specified within it.", e);
    }
    AmazonElasticMapReduce emr = AmazonElasticMapReduceClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials_profile))
            .withRegion(Regions.US_WEST_1).build();

    // Run the Flink streaming WordCount example via command-runner.jar;
    // bash -c expects the whole Flink command as a single string argument
    List<StepConfig> stepConfigs = new ArrayList<StepConfig>();
    HadoopJarStepConfig flinkWordCountConf = new HadoopJarStepConfig()
            .withJar("command-runner.jar")
            .withArgs("bash", "-c", "flink run -m yarn-cluster -yn 2 /usr/lib/flink/examples/streaming/WordCount.jar"
                    + " --input s3://path/to/input-file.txt --output s3://path/to/output/");
    StepConfig flinkRunWordCountStep = new StepConfig()
            .withName("Flink add a wordcount step and terminate")
            .withActionOnFailure("CONTINUE")
            .withHadoopJarStep(flinkWordCountConf);
    stepConfigs.add(flinkRunWordCountStep);

    // Transient cluster: installs Flink, runs the step, then terminates
    Application flink = new Application().withName("Flink");
    RunJobFlowRequest request = new RunJobFlowRequest()
            .withName("flink-transient").withReleaseLabel("emr-5.20.0").withApplications(flink)
            .withServiceRole("EMR_DefaultRole").withJobFlowRole("EMR_EC2_DefaultRole")
            .withLogUri("s3://path/to/my/logfiles")
            .withInstances(new JobFlowInstancesConfig()
                    .withEc2KeyName("myEc2Key").withEc2SubnetId("subnet-12ab3c45")
                    .withInstanceCount(3).withKeepJobFlowAliveWhenNoSteps(false)
                    .withMasterInstanceType("m4.large").withSlaveInstanceType("m4.large"))
            .withSteps(stepConfigs);

    RunJobFlowResult result = emr.runJobFlow(request);
    System.out.println("The cluster ID is " + result.getJobFlowId());
}
Also used: AmazonClientException (com.amazonaws.AmazonClientException), ArrayList (java.util.ArrayList), AWSCredentials (com.amazonaws.auth.AWSCredentials), AmazonElasticMapReduce (com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce), AWSStaticCredentialsProvider (com.amazonaws.auth.AWSStaticCredentialsProvider), ProfileCredentialsProvider (com.amazonaws.auth.profile.ProfileCredentialsProvider)
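A possible follow-up, not part of the original example: a minimal sketch that looks up the state of the newly launched cluster with DescribeCluster (DescribeClusterRequest and DescribeClusterResult from com.amazonaws.services.elasticmapreduce.model). Because the request sets withKeepJobFlowAliveWhenNoSteps(false), the cluster terminates on its own after the Flink step finishes.

// Sketch only: check the new cluster's state once; a real caller would poll
// until it reaches TERMINATED or TERMINATED_WITH_ERRORS.
DescribeClusterResult clusterInfo = emr.describeCluster(new DescribeClusterRequest()
        .withClusterId(result.getJobFlowId()));
System.out.println("Cluster state: " + clusterInfo.getCluster().getStatus().getState());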

Aggregations

AmazonClientException (com.amazonaws.AmazonClientException): 2
AWSCredentials (com.amazonaws.auth.AWSCredentials): 2
AWSStaticCredentialsProvider (com.amazonaws.auth.AWSStaticCredentialsProvider): 2
ProfileCredentialsProvider (com.amazonaws.auth.profile.ProfileCredentialsProvider): 2
AmazonElasticMapReduce (com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce): 2
StepFactory (com.amazonaws.services.elasticmapreduce.util.StepFactory): 1
ArrayList (java.util.ArrayList): 1