Use of com.google.cloud.dataproc.v1.ClusterControllerClient in project cdap by caskdata.
The class DataprocClient, method getClusterControllerClient.
/**
 * Using the input Google Credentials, retrieve the Dataproc cluster controller client.
 */
private static ClusterControllerClient getClusterControllerClient(DataprocConf conf) throws IOException {
  CredentialsProvider credentialsProvider = FixedCredentialsProvider.create(conf.getDataprocCredentials());
  String rootUrl = Optional.ofNullable(conf.getRootUrl()).orElse(ClusterControllerSettings.getDefaultEndpoint());
  String regionalEndpoint = conf.getRegion() + "-" + rootUrl;
  ClusterControllerSettings controllerSettings = ClusterControllerSettings.newBuilder()
    .setCredentialsProvider(credentialsProvider)
    .setEndpoint(regionalEndpoint)
    .build();
  return ClusterControllerClient.create(controllerSettings);
}
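For reference, below is a minimal standalone sketch of how a client built against a regional endpoint like the one above can be used. It is not part of the cdap code; it assumes application default credentials and hypothetical project/region values ("my-project", "us-central1") in place of a DataprocConf.

import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.dataproc.v1.Cluster;
import com.google.cloud.dataproc.v1.ClusterControllerClient;
import com.google.cloud.dataproc.v1.ClusterControllerSettings;
import java.io.IOException;

public class ListDataprocClusters {

  public static void main(String[] args) throws IOException {
    // Hypothetical values; substitute your own project and region.
    String projectId = "my-project";
    String region = "us-central1";

    // Same endpoint construction as getClusterControllerClient above:
    // "<region>-" prepended to the default endpoint (dataproc.googleapis.com:443).
    String regionalEndpoint = region + "-" + ClusterControllerSettings.getDefaultEndpoint();

    ClusterControllerSettings settings = ClusterControllerSettings.newBuilder()
      .setCredentialsProvider(FixedCredentialsProvider.create(GoogleCredentials.getApplicationDefault()))
      .setEndpoint(regionalEndpoint)
      .build();

    // try-with-resources closes the underlying gRPC channel when done.
    try (ClusterControllerClient client = ClusterControllerClient.create(settings)) {
      for (Cluster cluster : client.listClusters(projectId, region).iterateAll()) {
        System.out.println(cluster.getClusterName());
      }
    }
  }
}

Constructing the endpoint as region + "-" + default host mirrors the method above, so requests go to that region's Dataproc service rather than the global endpoint.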
Use of com.google.cloud.dataproc.v1.ClusterControllerClient in project cdap by caskdata.
The class DataprocClient, method getDataprocClient.
private static DataprocClient getDataprocClient(DataprocConf conf, boolean requireNetwork)
  throws IOException, GeneralSecurityException, RetryableProvisionException {
  ClusterControllerClient client = getClusterControllerClient(conf);
  Compute compute = getCompute(conf);
  if (!requireNetwork) {
    return new DataprocClient(conf, client, compute, null);
  }
  String network = conf.getNetwork();
  String systemNetwork = null;
  try {
    systemNetwork = DataprocUtils.getSystemNetwork();
  } catch (IllegalArgumentException e) {
    // expected when not running on GCP, ignore
  }
  String projectId = conf.getProjectId();
  String networkHostProjectId = conf.getNetworkHostProjectID();
  String systemProjectId = null;
  try {
    systemProjectId = DataprocUtils.getSystemProjectId();
  } catch (IllegalArgumentException e) {
    // expected when not running on GCP, ignore
  }
  if (network == null && projectId.equals(systemProjectId)) {
    // If the CDAP instance is running on a GCE/GKE VM from a project that matches the provisioner project,
    // use the network of that VM.
    network = systemNetwork;
  } else if (network == null) {
    // Otherwise, pick a network from the configured project using the Compute API
    network = findNetwork(networkHostProjectId, compute);
  }
  if (network == null) {
    throw new IllegalArgumentException("Unable to automatically detect a network, please explicitly set a network.");
  }
  String subnet = conf.getSubnet();
  Network networkInfo = getNetworkInfo(networkHostProjectId, network, compute);
  List<String> subnets = networkInfo.getSubnetworks();
  if (subnet != null && !subnetExists(subnets, subnet)) {
    throw new IllegalArgumentException(String.format("Subnet '%s' does not exist in network '%s' in project '%s'. "
      + "Please use a different subnet.", subnet, network, networkHostProjectId));
  }
  // if the network uses custom subnets, a subnet must be provided to the Dataproc API
  boolean autoCreateSubnet = networkInfo.getAutoCreateSubnetworks() == null
    ? false : networkInfo.getAutoCreateSubnetworks();
  if (!autoCreateSubnet) {
    // if the network uses custom subnets but none exist, error out
    if (subnets == null || subnets.isEmpty()) {
      throw new IllegalArgumentException(String.format("Network '%s' in project '%s' does not contain any subnets. "
        + "Please create a subnet or use a different network.", network, networkHostProjectId));
    }
  }
  subnet = chooseSubnet(network, subnets, subnet, conf.getRegion());
  return new DataprocClient(new DataprocConf(conf, network, subnet), client, compute, networkInfo);
}
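The helpers findNetwork, subnetExists, and chooseSubnet are not shown in this snippet. As a rough, hypothetical sketch only (not CDAP's actual implementation), region-aware subnet selection over Compute subnetwork self-links of the form .../regions/<region>/subnetworks/<name> could look like this:

import java.util.List;
import java.util.Optional;

public class SubnetSelectionSketch {

  // Hypothetical stand-in for a chooseSubnet-style helper: keep an explicitly
  // requested subnet, otherwise take the first subnetwork self-link that
  // belongs to the target region.
  static String pickSubnet(List<String> subnets, String requestedSubnet, String region) {
    if (requestedSubnet != null) {
      return requestedSubnet;
    }
    String regionMarker = "/regions/" + region + "/";
    Optional<String> match = subnets.stream()
      .filter(s -> s.contains(regionMarker))
      .findFirst();
    return match.orElseThrow(() ->
      new IllegalArgumentException("No subnet found in region '" + region + "'. Please specify one explicitly."));
  }

  public static void main(String[] args) {
    List<String> subnets = List.of(
      "https://www.googleapis.com/compute/v1/projects/my-project/regions/us-east1/subnetworks/default",
      "https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1/subnetworks/default");
    // Prints the us-central1 subnetwork self-link.
    System.out.println(pickSubnet(subnets, null, "us-central1"));
  }
}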