
Example 31 with Cluster

use of com.google.container.v1.Cluster in project java-container by googleapis.

the class ClusterManagerClientTest method createClusterExceptionTest2.

@Test
public void createClusterExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClusterManager.addException(exception);
    try {
        String projectId = "projectId-894832108";
        String zone = "zone3744684";
        Cluster cluster = Cluster.newBuilder().build();
        client.createCluster(projectId, zone, cluster);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
Also used : InvalidArgumentException(com.google.api.gax.rpc.InvalidArgumentException) StatusRuntimeException(io.grpc.StatusRuntimeException) Cluster(com.google.container.v1.Cluster) Test(org.junit.Test)
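
The snippets in these examples reference fields (mockClusterManager, channelProvider, client) that are initialized elsewhere in ClusterManagerClientTest. Below is a minimal sketch of that scaffolding, assuming the standard GAPIC-generated test setup with an in-process mock gRPC service; the generated file may differ in details.

private static MockClusterManager mockClusterManager;
private static MockServiceHelper mockServiceHelper;
private LocalChannelProvider channelProvider;
private ClusterManagerClient client;

@BeforeClass
public static void startStaticServer() {
    mockClusterManager = new MockClusterManager();
    // Start an in-process gRPC server backed by the mock service.
    mockServiceHelper = new MockServiceHelper(UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockClusterManager));
    mockServiceHelper.start();
}

@Before
public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    // Point the client at the in-process server and disable credentials.
    ClusterManagerSettings settings = ClusterManagerSettings.newBuilder().setTransportChannelProvider(channelProvider).setCredentialsProvider(NoCredentialsProvider.create()).build();
    client = ClusterManagerClient.create(settings);
}

@AfterClass
public static void stopServer() {
    mockServiceHelper.stop();
}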

Example 32 with Cluster

use of com.google.container.v1beta1.Cluster in project java-container by googleapis.

the class ClusterManagerClientTest method listClustersTest.

@Test
public void listClustersTest() throws Exception {
    ListClustersResponse expectedResponse = ListClustersResponse.newBuilder().addAllClusters(new ArrayList<Cluster>()).addAllMissingZones(new ArrayList<String>()).build();
    mockClusterManager.addResponse(expectedResponse);
    String projectId = "projectId-894832108";
    String zone = "zone3744684";
    ListClustersResponse actualResponse = client.listClusters(projectId, zone);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClusterManager.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListClustersRequest actualRequest = ((ListClustersRequest) actualRequests.get(0));
    Assert.assertEquals(projectId, actualRequest.getProjectId());
    Assert.assertEquals(zone, actualRequest.getZone());
    Assert.assertTrue(channelProvider.isHeaderSent(ApiClientHeaderProvider.getDefaultApiClientHeaderKey(), GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
Also used : AbstractMessage(com.google.protobuf.AbstractMessage) ArrayList(java.util.ArrayList) Cluster(com.google.container.v1beta1.Cluster) ListClustersRequest(com.google.container.v1beta1.ListClustersRequest) ListClustersResponse(com.google.container.v1beta1.ListClustersResponse) Test(org.junit.Test)
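
Outside the generated tests, the same RPC can be called against the live API. The following is a minimal sketch using the v1 surface (the v1beta1 client follows the same pattern); the project and zone values are placeholders, and application default credentials are assumed.

import com.google.cloud.container.v1.ClusterManagerClient;
import com.google.container.v1.Cluster;
import com.google.container.v1.ListClustersResponse;

public class ListClustersExample {
    public static void main(String[] args) throws Exception {
        // The client is AutoCloseable; closing it releases the underlying gRPC channel.
        try (ClusterManagerClient client = ClusterManagerClient.create()) {
            ListClustersResponse response = client.listClusters("my-project", "us-central1-a");
            for (Cluster cluster : response.getClustersList()) {
                System.out.println(cluster.getName() + ": " + cluster.getStatus());
            }
        }
    }
}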

Example 33 with Cluster

use of com.google.container.v1beta1.Cluster in project java-container by googleapis.

the class ClusterManagerClientTest method createClusterExceptionTest.

@Test
public void createClusterExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClusterManager.addException(exception);
    try {
        String projectId = "projectId-894832108";
        String zone = "zone3744684";
        Cluster cluster = Cluster.newBuilder().build();
        client.createCluster(projectId, zone, cluster);
        Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
        // Expected exception.
    }
}
Also used : InvalidArgumentException(com.google.api.gax.rpc.InvalidArgumentException) StatusRuntimeException(io.grpc.StatusRuntimeException) Cluster(com.google.container.v1beta1.Cluster) Test(org.junit.Test)

Example 34 with Cluster

use of com.google.cloud.dataproc.v1.Cluster in project cdap by cdapio.

the class DataprocClient method updateClusterLabels.

/**
 * Updates labels on the given Dataproc cluster.
 *
 * @param clusterName name of the cluster
 * @param labelsToSet Key/Value pairs to set on the Dataproc cluster.
 * @param labelsToRemove collection of labels to remove from the Dataproc cluster.
 */
void updateClusterLabels(String clusterName, Map<String, String> labelsToSet, Collection<String> labelsToRemove) throws RetryableProvisionException, InterruptedException {
    if (labelsToSet.isEmpty() && labelsToRemove.isEmpty()) {
        return;
    }
    try {
        Cluster cluster = getDataprocCluster(clusterName).filter(c -> c.getStatus().getState() == ClusterStatus.State.RUNNING).orElseThrow(() -> new DataprocRuntimeException("Dataproc cluster " + clusterName + " does not exist or not in running state"));
        Map<String, String> existingLabels = cluster.getLabelsMap();
        // No update needed: all labels to set already have the desired values and none of the labels to remove are present.
        if (labelsToSet.entrySet().stream().allMatch(e -> Objects.equals(e.getValue(), existingLabels.get(e.getKey()))) && labelsToRemove.stream().noneMatch(existingLabels::containsKey)) {
            return;
        }
        Map<String, String> newLabels = new HashMap<>(existingLabels);
        newLabels.keySet().removeAll(labelsToRemove);
        newLabels.putAll(labelsToSet);
        FieldMask updateMask = FieldMask.newBuilder().addPaths("labels").build();
        OperationFuture<Cluster, ClusterOperationMetadata> operationFuture = client.updateClusterAsync(UpdateClusterRequest.newBuilder().setProjectId(conf.getProjectId()).setRegion(conf.getRegion()).setClusterName(clusterName).setCluster(cluster.toBuilder().clearLabels().putAllLabels(newLabels)).setUpdateMask(updateMask).build());
        ClusterOperationMetadata metadata = operationFuture.getMetadata().get();
        int numWarnings = metadata.getWarningsCount();
        if (numWarnings > 0) {
            LOG.warn("Encountered {} warning {} while setting labels on cluster:\n{}", numWarnings, numWarnings > 1 ? "s" : "", String.join("\n", metadata.getWarningsList()));
        }
    } catch (ExecutionException e) {
        Throwable cause = e.getCause();
        if (cause instanceof ApiException) {
            throw handleApiException((ApiException) cause);
        }
        throw new DataprocRuntimeException(cause);
    }
}
Also used : HttpURLConnection(java.net.HttpURLConnection) NetworkPeering(com.google.api.services.compute.model.NetworkPeering) Arrays(java.util.Arrays) OperationFuture(com.google.api.gax.longrunning.OperationFuture) NotFoundException(com.google.api.gax.rpc.NotFoundException) LoggerFactory(org.slf4j.LoggerFactory) HttpStatus(org.apache.http.HttpStatus) FixedCredentialsProvider(com.google.api.gax.core.FixedCredentialsProvider) Network(com.google.api.services.compute.model.Network) DeleteClusterRequest(com.google.cloud.dataproc.v1.DeleteClusterRequest) GoogleJsonResponseException(com.google.api.client.googleapis.json.GoogleJsonResponseException) GeneralSecurityException(java.security.GeneralSecurityException) GetClusterRequest(com.google.cloud.dataproc.v1.GetClusterRequest) Cluster(com.google.cloud.dataproc.v1.Cluster) NodeInitializationAction(com.google.cloud.dataproc.v1.NodeInitializationAction) Map(java.util.Map) CredentialsProvider(com.google.api.gax.core.CredentialsProvider) ParseException(java.text.ParseException) EnumSet(java.util.EnumSet) AutoscalingConfig(com.google.cloud.dataproc.v1.AutoscalingConfig) ImmutableSet(com.google.common.collect.ImmutableSet) ClusterStatus(com.google.cloud.dataproc.v1.ClusterStatus) JacksonFactory(com.google.api.client.json.jackson2.JacksonFactory) Predicate(java.util.function.Predicate) Collection(java.util.Collection) HttpTransport(com.google.api.client.http.HttpTransport) Status(com.google.rpc.Status) Set(java.util.Set) GoogleNetHttpTransport(com.google.api.client.googleapis.javanet.GoogleNetHttpTransport) HttpResponseException(com.google.api.client.http.HttpResponseException) Collectors(java.util.stream.Collectors) AlreadyExistsException(com.google.api.gax.rpc.AlreadyExistsException) Node(io.cdap.cdap.runtime.spi.provisioner.Node) InstanceGroupConfig(com.google.cloud.dataproc.v1.InstanceGroupConfig) Objects(java.util.Objects) List(java.util.List) HttpStatusCodes(com.google.api.client.http.HttpStatusCodes) Stream(java.util.stream.Stream) OperationsClient(com.google.longrunning.OperationsClient) HttpCredentialsAdapter(com.google.auth.http.HttpCredentialsAdapter) FirewallList(com.google.api.services.compute.model.FirewallList) FieldMask(com.google.protobuf.FieldMask) IPRange(io.cdap.cdap.runtime.spi.common.IPRange) SSHPublicKey(io.cdap.cdap.runtime.spi.ssh.SSHPublicKey) Optional(java.util.Optional) Compute(com.google.api.services.compute.Compute) SoftwareConfig(com.google.cloud.dataproc.v1.SoftwareConfig) DataprocUtils(io.cdap.cdap.runtime.spi.common.DataprocUtils) Instance(com.google.api.services.compute.model.Instance) ClusterConfig(com.google.cloud.dataproc.v1.ClusterConfig) SimpleDateFormat(java.text.SimpleDateFormat) HashMap(java.util.HashMap) HttpRequest(com.google.api.client.http.HttpRequest) UpdateClusterRequest(com.google.cloud.dataproc.v1.UpdateClusterRequest) Operation(com.google.longrunning.Operation) GceClusterConfig(com.google.cloud.dataproc.v1.GceClusterConfig) ArrayList(java.util.ArrayList) ClusterOperationMetadata(com.google.cloud.dataproc.v1.ClusterOperationMetadata) HashSet(java.util.HashSet) Strings(com.google.common.base.Strings) NetworkList(com.google.api.services.compute.model.NetworkList) DiskConfig(com.google.cloud.dataproc.v1.DiskConfig) HttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer) AccessConfig(com.google.api.services.compute.model.AccessConfig) Firewall(com.google.api.services.compute.model.Firewall) SocketTimeoutException(java.net.SocketTimeoutException) 
ShieldedInstanceConfig(com.google.cloud.dataproc.v1.ShieldedInstanceConfig) StreamSupport(java.util.stream.StreamSupport) Nullable(javax.annotation.Nullable) ClusterControllerClient(com.google.cloud.dataproc.v1.ClusterControllerClient) RetryableProvisionException(io.cdap.cdap.runtime.spi.provisioner.RetryableProvisionException) Logger(org.slf4j.Logger) EncryptionConfig(com.google.cloud.dataproc.v1.EncryptionConfig) IOException(java.io.IOException) ApiException(com.google.api.gax.rpc.ApiException) ClusterControllerSettings(com.google.cloud.dataproc.v1.ClusterControllerSettings) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) Duration(com.google.protobuf.Duration) EndpointConfig(com.google.cloud.dataproc.v1.EndpointConfig) LifecycleConfig(com.google.cloud.dataproc.v1.LifecycleConfig) Collections(java.util.Collections)
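
A minimal usage sketch of updateClusterLabels follows; the dataprocClient variable, cluster name, and label values are illustrative only, since the real call sites live inside the CDAP provisioner. Note that Dataproc label values may only contain lowercase letters, digits, underscores, and dashes.

// Hypothetical caller: set one label and remove another on a RUNNING cluster.
Map<String, String> labelsToSet = Collections.singletonMap("cdap-version", "6-9-0");
Collection<String> labelsToRemove = Collections.singletonList("obsolete-label");
dataprocClient.updateClusterLabels("my-cluster", labelsToSet, labelsToRemove);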

Example 35 with Cluster

use of io.cdap.cdap.runtime.spi.provisioner.Cluster in project cdap by cdapio.

the class DataprocProvisioner method createCluster.

@Override
public Cluster createCluster(ProvisionerContext context) throws Exception {
    DataprocConf conf = DataprocConf.create(createContextProperties(context));
    if (!isAutoscalingFieldsValid(conf, createContextProperties(context))) {
        LOG.warn("The configs : {}, {}, {} will not be considered when {} is enabled ", DataprocConf.WORKER_NUM_NODES, DataprocConf.SECONDARY_WORKER_NUM_NODES, DataprocConf.AUTOSCALING_POLICY, DataprocConf.PREDEFINED_AUTOSCALE_ENABLED);
    }
    if (context.getRuntimeMonitorType() == RuntimeMonitorType.SSH || !conf.isRuntimeJobManagerEnabled()) {
        // Generate and set the SSH key if the context does not already have one.
        // Since this method can be invoked from a retry, we don't need to keep regenerating the keys.
        SSHContext sshContext = context.getSSHContext();
        if (sshContext != null) {
            SSHKeyPair sshKeyPair = sshContext.getSSHKeyPair().orElse(null);
            if (sshKeyPair == null) {
                sshKeyPair = sshContext.generate("cdap");
                sshContext.setSSHKeyPair(sshKeyPair);
            }
            conf = DataprocConf.create(createContextProperties(context), sshKeyPair.getPublicKey());
        }
    }
    try (DataprocClient client = getClient(conf)) {
        Cluster reused = tryReuseCluster(client, context, conf);
        if (reused != null) {
            DataprocUtils.emitMetric(context, conf.getRegion(), "provisioner.createCluster.reuse.count");
            return reused;
        }
        String clusterName = getRunKey(context);
        // if it already exists, it means this is a retry. We can skip actually making the request
        Optional<Cluster> existing = client.getCluster(clusterName);
        if (existing.isPresent()) {
            return existing.get();
        }
        String imageVersion = getImageVersion(context, conf);
        String imageDescription = conf.getCustomImageUri();
        if (imageDescription == null || imageDescription.isEmpty()) {
            imageDescription = imageVersion;
        }
        // Reload system context properties and get system labels
        Map<String, String> labels = new HashMap<>();
        labels.putAll(getSystemLabels());
        labels.putAll(getReuseLabels(context, conf));
        labels.putAll(conf.getClusterLabels());
        LOG.info("Creating Dataproc cluster {} in project {}, in region {}, with image {}, with labels {}, endpoint {}", clusterName, conf.getProjectId(), conf.getRegion(), imageDescription, labels, getRootUrl(conf));
        boolean privateInstance = Boolean.parseBoolean(getSystemContext().getProperties().get(PRIVATE_INSTANCE));
        ClusterOperationMetadata createOperationMeta = client.createCluster(clusterName, imageVersion, labels, privateInstance);
        int numWarnings = createOperationMeta.getWarningsCount();
        if (numWarnings > 0) {
            LOG.warn("Encountered {} warning{} while creating Dataproc cluster:\n{}", numWarnings, numWarnings > 1 ? "s" : "", String.join("\n", createOperationMeta.getWarningsList()));
        }
        DataprocUtils.emitMetric(context, conf.getRegion(), "provisioner.createCluster.response.count");
        return new Cluster(clusterName, ClusterStatus.CREATING, Collections.emptyList(), Collections.emptyMap());
    } catch (Exception e) {
        DataprocUtils.emitMetric(context, conf.getRegion(), "provisioner.createCluster.response.count", e);
        throw e;
    }
}
Also used : SSHKeyPair(io.cdap.cdap.runtime.spi.ssh.SSHKeyPair) SSHContext(io.cdap.cdap.runtime.spi.ssh.SSHContext) ClusterOperationMetadata(com.google.cloud.dataproc.v1.ClusterOperationMetadata) HashMap(java.util.HashMap) Cluster(io.cdap.cdap.runtime.spi.provisioner.Cluster) GeneralSecurityException(java.security.GeneralSecurityException) RetryableProvisionException(io.cdap.cdap.runtime.spi.provisioner.RetryableProvisionException) IOException(java.io.IOException)
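
createCluster returns a lightweight provisioner Cluster in the CREATING state rather than waiting for Dataproc to finish provisioning. A minimal sketch of how a caller could poll for readiness through the provisioner SPI is shown below; the provisioner, context, and cluster variables are assumed to come from the surrounding framework, and the framework's real polling logic differs.

// Sketch only: wait until the cluster leaves the CREATING state.
ClusterStatus status = provisioner.getClusterStatus(context, cluster);
while (status == ClusterStatus.CREATING) {
    TimeUnit.SECONDS.sleep(10);
    status = provisioner.getClusterStatus(context, cluster);
}
if (status != ClusterStatus.RUNNING) {
    throw new IllegalStateException("Cluster ended in unexpected state " + status);
}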

Aggregations

Test (org.junit.Test): 40
AbstractMessage (com.google.protobuf.AbstractMessage): 19
ClusterOperationMetadata (com.google.cloud.dataproc.v1.ClusterOperationMetadata): 18
ExecutionException (java.util.concurrent.ExecutionException): 18
HashMap (java.util.HashMap): 17
Cluster (com.google.bigtable.admin.v2.Cluster): 16
Cluster (com.google.cloud.dataproc.v1.Cluster): 15
ArrayList (java.util.ArrayList): 14
ClusterControllerSettings (com.google.cloud.dataproc.v1.ClusterControllerSettings): 12
Cluster (com.google.container.v1.Cluster): 12
ClusterConfig (com.google.cloud.dataproc.v1.ClusterConfig): 10
ClusterControllerClient (com.google.cloud.dataproc.v1.ClusterControllerClient): 10
Operation (com.google.longrunning.Operation): 10
Cluster (io.envoyproxy.envoy.config.cluster.v3.Cluster): 10
RetryableProvisionException (io.cdap.cdap.runtime.spi.provisioner.RetryableProvisionException): 8
IOException (java.io.IOException): 8
GeneralSecurityException (java.security.GeneralSecurityException): 8
Optional (java.util.Optional): 8
InvalidArgumentException (com.google.api.gax.rpc.InvalidArgumentException): 7
GceClusterConfig (com.google.cloud.dataproc.v1.GceClusterConfig): 7