use of com.google.api.services.notebooks.v1.model.Operation in project platinum by hartwigmedical.
the class KubernetesEngineTest method mockForClusterCreation.
public void mockForClusterCreation() throws IOException {
    Operations operations = mock(Operations.class);
    Operations.Get operationsGet = mock(Operations.Get.class);
    Operation executedOperationsGet = mock(Operation.class);
    when(locations.operations()).thenReturn(operations);
    when(operations.get(anyString())).thenReturn(operationsGet);
    when(operationsGet.execute()).thenReturn(executedOperationsGet);
    when(executedOperationsGet.getStatus()).thenReturn("DONE");
}
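The mocks referenced here and in the next test (locations, clusters, and the victim KubernetesEngine under test) come from fixture setup that is not part of this snippet. A minimal, hedged sketch of what that wiring might look like, assuming JUnit 4, Mockito, and the GKE Container client hierarchy that the production code below uses; the field names and the @Before method are illustrative, not taken from the actual test:

// Hypothetical fixture sketch: wires up the nested Container API mocks the snippets rely on.
// Everything here is an assumption; the real test's setup is not shown on this page.
private Container containerApi;
private Container.Projects.Locations locations;
private Container.Projects.Locations.Clusters clusters;

@Before
public void setUp() {
    containerApi = mock(Container.class);
    Container.Projects projects = mock(Container.Projects.class);
    locations = mock(Container.Projects.Locations.class);
    clusters = mock(Container.Projects.Locations.Clusters.class);
    when(containerApi.projects()).thenReturn(projects);
    when(projects.locations()).thenReturn(locations);
    when(locations.clusters()).thenReturn(clusters);
}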
use of com.google.api.services.notebooks.v1.model.Operation in project platinum by hartwigmedical.
the class KubernetesEngineTest method usesConfiguredClusterName.
@Test
public void usesConfiguredClusterName() throws Exception {
    Get foundOperation = mock(Get.class);
    Create created = mock(Create.class);
    Operation executedCreate = mock(Operation.class);
    when(clusters.get(anyString())).thenReturn(foundOperation);
    when(foundOperation.execute()).thenThrow(GoogleJsonResponseException.class);
    ArgumentCaptor<CreateClusterRequest> createRequest = ArgumentCaptor.forClass(CreateClusterRequest.class);
    when(clusters.create(eq(format("projects/%s/locations/%s", PROJECT, REGION)), createRequest.capture())).thenReturn(created);
    when(created.execute()).thenReturn(executedCreate);
    when(executedCreate.getName()).thenReturn("created");
    mockForClusterCreation();
    victim.findOrCreate(RUN_NAME, Collections.emptyList(), JSON_KEY, BUCKET, SERVICE_ACCOUNT);
    assertThat(createRequest.getValue().getCluster().getName()).isEqualTo("runName");
}
use of com.google.api.services.notebooks.v1.model.Operation in project platinum by hartwigmedical.
the class KubernetesEngine method create.
private static void create(final Container containerApi, final String parent, final String cluster,
        final GcpConfiguration gcpConfiguration) {
    try {
        Cluster newCluster = new Cluster();
        newCluster.setName(cluster);
        newCluster.setNetwork(gcpConfiguration.networkUrl());
        newCluster.setSubnetwork(gcpConfiguration.subnetUrl());
        newCluster.setLocations(gcpConfiguration.zones());
        NodePool defaultNodePool = new NodePool().setName("default").setInitialNodeCount(2);
        final NodeConfig nodeConfig = new NodeConfig().setPreemptible(gcpConfiguration.preemptibleCluster())
                .setOauthScopes(List.of("https://www.googleapis.com/auth/cloud-platform"))
                .setDiskSizeGb(500);
        if (!gcpConfiguration.networkTags().isEmpty()) {
            nodeConfig.setTags(gcpConfiguration.networkTags());
        }
        defaultNodePool.setConfig(nodeConfig);
        newCluster.setNodePools(List.of(defaultNodePool));
        IPAllocationPolicy ipAllocationPolicy = new IPAllocationPolicy();
        if (gcpConfiguration.privateCluster()) {
            PrivateClusterConfig privateClusterConfig = new PrivateClusterConfig();
            privateClusterConfig.setEnablePrivateEndpoint(true);
            privateClusterConfig.setEnablePrivateNodes(true);
            privateClusterConfig.setMasterIpv4CidrBlock(gcpConfiguration.masterIpv4CidrBlock());
            newCluster.setPrivateCluster(true);
            newCluster.setPrivateClusterConfig(privateClusterConfig);
            ipAllocationPolicy.setUseIpAliases(true);
        }
        if (gcpConfiguration.secondaryRangeNamePods().isPresent() && gcpConfiguration.secondaryRangeNameServices().isPresent()) {
            ipAllocationPolicy.setClusterSecondaryRangeName(gcpConfiguration.secondaryRangeNamePods().get());
            ipAllocationPolicy.setServicesSecondaryRangeName(gcpConfiguration.secondaryRangeNameServices().get());
        }
        newCluster.setIpAllocationPolicy(ipAllocationPolicy);
        CreateClusterRequest createRequest = new CreateClusterRequest();
        createRequest.setCluster(newCluster);
        Create created = containerApi.projects().locations().clusters().create(parent, createRequest);
        Operation execute = created.execute();
        LOGGER.info("Creating new kubernetes cluster {} in project {} and region {}, this can take upwards of 5 minutes...",
                Console.bold(newCluster.getName()),
                Console.bold(gcpConfiguration.projectOrThrow()),
                Console.bold(gcpConfiguration.regionOrThrow()));
        Failsafe.with(new RetryPolicy<>().withMaxDuration(ofMinutes(15))
                        .withDelay(ofSeconds(15))
                        .withMaxAttempts(-1)
                        .handleResult(null)
                        .handleResult("RUNNING"))
                .onFailure(objectExecutionCompletedEvent -> LOGGER.info("Waiting on operation, status is [{}]",
                        objectExecutionCompletedEvent.getResult()))
                .get(() -> containerApi.projects()
                        .locations()
                        .operations()
                        .get(String.format("projects/%s/locations/%s/operations/%s",
                                gcpConfiguration.projectOrThrow(),
                                gcpConfiguration.regionOrThrow(),
                                execute.getName()))
                        .execute()
                        .getStatus());
    } catch (Exception e) {
        throw new RuntimeException("Failed to create cluster", e);
    }
}
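The Failsafe call at the end of create() keeps re-reading the cluster-creation Operation until GKE reports a terminal status. Below is a stripped-down sketch of that polling pattern in isolation, assuming Failsafe 2.x (net.jodah.failsafe) and that a Container client and a fully qualified operation name are already in scope; the variable names are illustrative:

// Hedged sketch of the status-polling pattern used above, not the project's exact code.
RetryPolicy<String> waitForDone = new RetryPolicy<String>()
        .handleResult(null)          // keep polling while no status has been returned yet
        .handleResult("RUNNING")     // keep polling while the operation is still running
        .withDelay(Duration.ofSeconds(15))
        .withMaxAttempts(-1)         // no attempt limit; bounded by the max duration instead
        .withMaxDuration(Duration.ofMinutes(15));

String finalStatus = Failsafe.with(waitForDone)
        .get(() -> containerApi.projects()
                .locations()
                .operations()
                .get(operationName)  // e.g. "projects/<project>/locations/<region>/operations/<name>"
                .execute()
                .getStatus());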
use of com.google.api.services.notebooks.v1.model.Operation in project java-docs-samples by GoogleCloudPlatform.
the class CheckLatestTransferOperationApiary method checkLatestTransferOperationApiary.
// Gets the requested transfer job and checks its latest operation
public static void checkLatestTransferOperationApiary(String projectId, String jobName) throws IOException {
    // Your Google Cloud Project ID
    // String projectId = "your-project-id";

    // The name of the job to check
    // String jobName = "myJob/1234567890";

    // Create Storage Transfer client
    GoogleCredentials credential = GoogleCredentials.getApplicationDefault();
    if (credential.createScopedRequired()) {
        credential = credential.createScoped(StoragetransferScopes.all());
    }
    Storagetransfer storageTransfer = new Storagetransfer.Builder(Utils.getDefaultTransport(),
            Utils.getDefaultJsonFactory(), new HttpCredentialsAdapter(credential)).build();

    // Get the transfer job and check its latest operation
    TransferJob transferJob = storageTransfer.transferJobs().get(jobName, projectId).execute();
    String latestOperationName = transferJob.getLatestOperationName();

    if (latestOperationName != null) {
        Operation latestOperation = storageTransfer.transferOperations().get(latestOperationName).execute();
        System.out.println("The latest operation for transfer job " + jobName + " is:");
        System.out.println(latestOperation.toPrettyString());
    } else {
        System.out.println("Transfer job " + jobName + " does not have an operation scheduled yet,"
                + " try again once the job starts running.");
    }
}
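A hedged example of how this sample might be invoked; both values are placeholders taken from the sample's own comments, not real identifiers:

// Hypothetical caller for the sample above; replace the placeholders before running.
public static void main(String[] args) throws IOException {
    String projectId = "your-project-id";
    String jobName = "myJob/1234567890";
    checkLatestTransferOperationApiary(projectId, jobName);
}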
use of com.google.api.services.notebooks.v1.model.Operation in project java-docs-samples by GoogleCloudPlatform.
the class DicomStoreImport method dicomStoreImport.
public static void dicomStoreImport(String dicomStoreName, String gcsUri) throws IOException {
    // String dicomStoreName =
    //     String.format(
    //         DICOM_NAME, "your-project-id", "your-region-id", "your-dataset-id", "your-dicom-id");
    // String gcsUri = "gs://your-bucket-id/path/to/destination/dir"

    // Initialize the client, which will be used to interact with the service.
    CloudHealthcare client = createClient();

    // Configure where the store should be imported from.
    GoogleCloudHealthcareV1DicomGcsSource gcsSource = new GoogleCloudHealthcareV1DicomGcsSource().setUri(gcsUri);
    ImportDicomDataRequest importRequest = new ImportDicomDataRequest().setGcsSource(gcsSource);

    // Create request and configure any parameters.
    DicomStores.CloudHealthcareImport request = client.projects()
            .locations()
            .datasets()
            .dicomStores()
            .healthcareImport(dicomStoreName, importRequest);

    // Execute the request, wait for the operation to complete, and process the results.
    try {
        Operation operation = request.execute();
        while (operation.getDone() == null || !operation.getDone()) {
            // Update the status of the operation with another request.
            // Pause for 500ms between requests.
            Thread.sleep(500);
            operation = client.projects().locations().datasets().operations().get(operation.getName()).execute();
        }
        System.out.println("DICOM store import complete." + operation.getResponse());
    } catch (Exception ex) {
        System.out.printf("Error during request execution: %s", ex.toString());
        ex.printStackTrace(System.out);
    }
}
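The createClient() helper is referenced but not included in this snippet. A minimal sketch of what such a helper might look like, assuming the usual google-api-client builder pattern with application-default credentials; the application name is a placeholder and this is not the sample's actual implementation:

// Hedged sketch of a createClient() helper under the assumptions stated above.
private static CloudHealthcare createClient() throws IOException {
    GoogleCredentials credential = GoogleCredentials.getApplicationDefault()
            .createScoped(Collections.singletonList(CloudHealthcareScopes.CLOUD_PLATFORM));
    return new CloudHealthcare.Builder(new NetHttpTransport(),
            GsonFactory.getDefaultInstance(),
            new HttpCredentialsAdapter(credential))
            .setApplicationName("healthcare-samples")  // placeholder application name
            .build();
}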