Use of com.google.cloud.dataproc.v1beta2.JobControllerClient in project java-dataproc by googleapis.
The class SubmitJob, method submitJob.
public static void submitJob(String projectId, String region, String clusterName)
    throws IOException, InterruptedException {
  String myEndpoint = String.format("%s-dataproc.googleapis.com:443", region);

  // Configure the settings for the job controller client.
  JobControllerSettings jobControllerSettings =
      JobControllerSettings.newBuilder().setEndpoint(myEndpoint).build();

  // Create the job controller client. The client only needs to be created once and can be
  // reused for multiple requests. Using a try-with-resources closes the client,
  // but this can also be done manually with the .close() method.
  try (JobControllerClient jobControllerClient =
      JobControllerClient.create(jobControllerSettings)) {

    // Configure cluster placement for the job.
    JobPlacement jobPlacement = JobPlacement.newBuilder().setClusterName(clusterName).build();

    // Configure Spark job settings.
    SparkJob sparkJob = SparkJob.newBuilder()
        .setMainClass("org.apache.spark.examples.SparkPi")
        .addJarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
        .addArgs("1000")
        .build();

    Job job = Job.newBuilder().setPlacement(jobPlacement).setSparkJob(sparkJob).build();

    // Submit an asynchronous request to execute the job.
    OperationFuture<Job, JobMetadata> submitJobAsOperationAsyncRequest =
        jobControllerClient.submitJobAsOperationAsync(projectId, region, job);
    Job response = submitJobAsOperationAsyncRequest.get();

    // Print the driver output from Google Cloud Storage.
    Matcher matches =
        Pattern.compile("gs://(.*?)/(.*)").matcher(response.getDriverOutputResourceUri());
    matches.matches();
    Storage storage = StorageOptions.getDefaultInstance().getService();
    Blob blob = storage.get(matches.group(1), String.format("%s.000000000", matches.group(2)));
    System.out.println(
        String.format("Job finished successfully: %s", new String(blob.getContent())));
  } catch (ExecutionException e) {
    // If the job does not complete successfully, print the error message.
    System.err.println(String.format("submitJob: %s ", e.getMessage()));
  }
}
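For reference, a minimal sketch of how this method might be invoked. The project ID, region, and cluster name below are placeholder values, not values taken from the source.

public static void main(String[] args) throws IOException, InterruptedException {
  // Placeholder values; replace with your own project, region, and existing cluster.
  String projectId = "my-project-id";
  String region = "us-central1";
  String clusterName = "my-cluster";
  submitJob(projectId, region, clusterName);
}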
Use of com.google.cloud.dataproc.v1beta2.JobControllerClient in project cdap by cdapio.
The class DataprocRuntimeJobManager, method getJobControllerClient.
/**
 * Returns a {@link JobControllerClient} to interact with Dataproc Job API.
 */
private JobControllerClient getJobControllerClient() throws IOException {
  JobControllerClient client = jobControllerClient;
  if (client != null) {
    return client;
  }
  synchronized (this) {
    client = jobControllerClient;
    if (client != null) {
      return client;
    }
    // Instantiate a Dataproc job controller client with the configured credentials and endpoint.
    CredentialsProvider credentialsProvider = FixedCredentialsProvider.create(credentials);
    this.jobControllerClient = client = JobControllerClient.create(
        JobControllerSettings.newBuilder()
            .setCredentialsProvider(credentialsProvider)
            .setEndpoint(region + "-" + endpoint)
            .build());
  }
  return client;
}
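This is the double-checked locking idiom for lazily creating a single shared client; it assumes the jobControllerClient field is declared volatile so the unsynchronized first read is safe. A minimal sketch of the matching cleanup, assuming the manager exposes some shutdown hook (the method name below is illustrative, not taken from the source):

// Hypothetical cleanup hook: closes the lazily created client, if one was ever created.
public void close() {
  JobControllerClient client = jobControllerClient;
  if (client != null) {
    client.close();
  }
}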
Use of com.google.cloud.dataproc.v1beta2.JobControllerClient in project java-dataproc by googleapis.
The class Quickstart, method quickstart.
public static void quickstart(
    String projectId, String region, String clusterName, String jobFilePath)
    throws IOException, InterruptedException {
  String myEndpoint = String.format("%s-dataproc.googleapis.com:443", region);

  // Configure the settings for the cluster controller client.
  ClusterControllerSettings clusterControllerSettings =
      ClusterControllerSettings.newBuilder().setEndpoint(myEndpoint).build();

  // Configure the settings for the job controller client.
  JobControllerSettings jobControllerSettings =
      JobControllerSettings.newBuilder().setEndpoint(myEndpoint).build();

  // Create both a cluster controller client and a job controller client with the configured
  // settings. The clients only need to be created once and can be reused for multiple
  // requests. Using a try-with-resources closes the clients, but this can also be done
  // manually with the .close() method.
  try (ClusterControllerClient clusterControllerClient =
          ClusterControllerClient.create(clusterControllerSettings);
      JobControllerClient jobControllerClient =
          JobControllerClient.create(jobControllerSettings)) {

    // Configure the settings for our cluster.
    InstanceGroupConfig masterConfig = InstanceGroupConfig.newBuilder()
        .setMachineTypeUri("n1-standard-2").setNumInstances(1).build();
    InstanceGroupConfig workerConfig = InstanceGroupConfig.newBuilder()
        .setMachineTypeUri("n1-standard-2").setNumInstances(2).build();
    ClusterConfig clusterConfig = ClusterConfig.newBuilder()
        .setMasterConfig(masterConfig).setWorkerConfig(workerConfig).build();

    // Create the cluster object with the desired cluster config.
    Cluster cluster =
        Cluster.newBuilder().setClusterName(clusterName).setConfig(clusterConfig).build();

    // Create the Cloud Dataproc cluster.
    OperationFuture<Cluster, ClusterOperationMetadata> createClusterAsyncRequest =
        clusterControllerClient.createClusterAsync(projectId, region, cluster);
    Cluster clusterResponse = createClusterAsyncRequest.get();
    System.out.println(
        String.format("Cluster created successfully: %s", clusterResponse.getClusterName()));

    // Configure the settings for our job.
    JobPlacement jobPlacement = JobPlacement.newBuilder().setClusterName(clusterName).build();
    PySparkJob pySparkJob = PySparkJob.newBuilder().setMainPythonFileUri(jobFilePath).build();
    Job job = Job.newBuilder().setPlacement(jobPlacement).setPysparkJob(pySparkJob).build();

    // Submit an asynchronous request to execute the job.
    OperationFuture<Job, JobMetadata> submitJobAsOperationAsyncRequest =
        jobControllerClient.submitJobAsOperationAsync(projectId, region, job);
    Job jobResponse = submitJobAsOperationAsyncRequest.get();

    // Print the driver output from Google Cloud Storage.
    Matcher matches =
        Pattern.compile("gs://(.*?)/(.*)").matcher(jobResponse.getDriverOutputResourceUri());
    matches.matches();
    Storage storage = StorageOptions.getDefaultInstance().getService();
    Blob blob = storage.get(matches.group(1), String.format("%s.000000000", matches.group(2)));
    System.out.println(
        String.format("Job finished successfully: %s", new String(blob.getContent())));

    // Delete the cluster.
    OperationFuture<Empty, ClusterOperationMetadata> deleteClusterAsyncRequest =
        clusterControllerClient.deleteClusterAsync(projectId, region, clusterName);
    deleteClusterAsyncRequest.get();
    System.out.println(String.format("Cluster \"%s\" successfully deleted.", clusterName));
  } catch (ExecutionException e) {
    // If cluster creation, job execution, or cluster deletion fails, print the error message.
    System.err.println(String.format("quickstart: %s ", e.getMessage()));
  }
}
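A minimal sketch of how the quickstart might be driven end to end. All values below are placeholders; jobFilePath is assumed to point at a PySpark script already uploaded to Cloud Storage.

public static void main(String[] args) throws IOException, InterruptedException {
  // Placeholder values; substitute your own project, region, cluster name, and job file.
  String projectId = "my-project-id";
  String region = "us-central1";
  String clusterName = "my-quickstart-cluster";
  String jobFilePath = "gs://my-bucket/jobs/hello_world.py";
  quickstart(projectId, region, clusterName, jobFilePath);
}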
Use of com.google.cloud.dataproc.v1beta2.JobControllerClient in project java-dataproc by googleapis.
The class SubmitHadoopFsJob, method submitHadoopFsJob.
public static void submitHadoopFsJob(
    String projectId, String region, String clusterName, String hadoopFsQuery)
    throws IOException, InterruptedException {
  String myEndpoint = String.format("%s-dataproc.googleapis.com:443", region);

  // Configure the settings for the job controller client.
  JobControllerSettings jobControllerSettings =
      JobControllerSettings.newBuilder().setEndpoint(myEndpoint).build();

  // Create the job controller client. The client only needs to be created once and can be
  // reused for multiple requests. Using a try-with-resources closes the client,
  // but this can also be done manually with the .close() method.
  try (JobControllerClient jobControllerClient =
      JobControllerClient.create(jobControllerSettings)) {

    // Configure cluster placement for the job.
    JobPlacement jobPlacement = JobPlacement.newBuilder().setClusterName(clusterName).build();

    // Configure Hadoop job settings. The HadoopFS query is set here.
    HadoopJob hadoopJob = HadoopJob.newBuilder()
        .setMainClass("org.apache.hadoop.fs.FsShell")
        .addAllArgs(stringToList(hadoopFsQuery))
        .build();

    Job job = Job.newBuilder().setPlacement(jobPlacement).setHadoopJob(hadoopJob).build();

    // Submit an asynchronous request to execute the job.
    OperationFuture<Job, JobMetadata> submitJobAsOperationAsyncRequest =
        jobControllerClient.submitJobAsOperationAsync(projectId, region, job);
    Job response = submitJobAsOperationAsyncRequest.get();

    // Print the driver output from Google Cloud Storage.
    Matcher matches =
        Pattern.compile("gs://(.*?)/(.*)").matcher(response.getDriverOutputResourceUri());
    matches.matches();
    Storage storage = StorageOptions.getDefaultInstance().getService();
    Blob blob = storage.get(matches.group(1), String.format("%s.000000000", matches.group(2)));
    System.out.println(
        String.format("Job finished successfully: %s", new String(blob.getContent())));
  } catch (ExecutionException e) {
    // If the job does not complete successfully, print the error message.
    System.err.println(String.format("submitHadoopFSJob: %s ", e.getMessage()));
  }
}
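The snippet calls a stringToList helper that is not shown here. A minimal sketch, assuming hadoopFsQuery is a space-separated list of FsShell arguments (for example "-ls /"):

// Assumed helper: splits a space-separated query into individual FsShell arguments.
// Requires java.util.Arrays and java.util.List.
private static List<String> stringToList(String s) {
  return Arrays.asList(s.split(" "));
}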