Search in sources:

Example 1 with ResourceMetadata

Use of bio.terra.workspace.model.ResourceMetadata in the project terra-workspace-manager by DataBiosphere.

From the class ControlledGcsBucketLifecycle, method testCloneBucket:

private void testCloneBucket(GcpGcsBucketResource sourceBucket, TestUserSpecification cloningUser, ControlledGcpResourceApi resourceApi) throws Exception {
    final String destinationBucketName = "clone-" + UUID.randomUUID().toString();
    // clone the bucket
    final String clonedBucketDescription = "A cloned bucket";
    final CloneControlledGcpGcsBucketRequest cloneRequest =
        new CloneControlledGcpGcsBucketRequest()
            .bucketName(destinationBucketName)
            .destinationWorkspaceId(getDestinationWorkspaceId())
            .name(sourceBucket.getMetadata().getName())
            .description(clonedBucketDescription)
            .location(null) // null means: use the same location as the source bucket
            .cloningInstructions(CloningInstructionsEnum.RESOURCE)
            .jobControl(new JobControl().id(UUID.randomUUID().toString()));
    logger.info("Cloning bucket\n\tname: {}\n\tresource ID: {}\n\tworkspace: {}\n\t" + "projectID: {}\ninto destination bucket\n\tname: {}\n\tworkspace: {}\n\tprojectID: {}", sourceBucket.getMetadata().getName(), sourceBucket.getMetadata().getResourceId(), sourceBucket.getMetadata().getWorkspaceId(), getSourceProjectId(), destinationBucketName, getDestinationWorkspaceId(), getDestinationProjectId());
    CloneControlledGcpGcsBucketResult cloneResult = resourceApi.cloneGcsBucket(cloneRequest, sourceBucket.getMetadata().getWorkspaceId(), sourceBucket.getMetadata().getResourceId());
    cloneResult = ClientTestUtils.pollWhileRunning(cloneResult, () -> resourceApi.getCloneGcsBucketResult(cloneRequest.getDestinationWorkspaceId(), cloneRequest.getJobControl().getId()), CloneControlledGcpGcsBucketResult::getJobReport, Duration.ofSeconds(5));
    ClientTestUtils.assertJobSuccess("cloned bucket", cloneResult.getJobReport(), cloneResult.getErrorReport());
    final ClonedControlledGcpGcsBucket clonedBucket = cloneResult.getBucket();
    assertEquals(getWorkspaceId(), clonedBucket.getSourceWorkspaceId());
    assertEquals(sourceBucket.getMetadata().getResourceId(), clonedBucket.getSourceResourceId());
    final CreatedControlledGcpGcsBucket createdBucket = clonedBucket.getBucket();
    final GcpGcsBucketResource clonedResource = createdBucket.getGcpBucket();
    assertEquals(destinationBucketName, clonedResource.getAttributes().getBucketName());
    final ResourceMetadata clonedResourceMetadata = clonedResource.getMetadata();
    assertEquals(getDestinationWorkspaceId(), clonedResourceMetadata.getWorkspaceId());
    assertEquals(sourceBucket.getMetadata().getName(), clonedResourceMetadata.getName());
    assertEquals(clonedBucketDescription, clonedResourceMetadata.getDescription());
    final ResourceMetadata sourceMetadata = sourceBucket.getMetadata();
    assertEquals(CloningInstructionsEnum.NOTHING, clonedResourceMetadata.getCloningInstructions());
    assertEquals(sourceMetadata.getCloudPlatform(), clonedResourceMetadata.getCloudPlatform());
    assertEquals(ResourceType.GCS_BUCKET, clonedResourceMetadata.getResourceType());
    assertEquals(StewardshipType.CONTROLLED, clonedResourceMetadata.getStewardshipType());
    assertEquals(sourceMetadata.getControlledResourceMetadata().getAccessScope(), clonedResourceMetadata.getControlledResourceMetadata().getAccessScope());
    assertEquals(sourceMetadata.getControlledResourceMetadata().getManagedBy(), clonedResourceMetadata.getControlledResourceMetadata().getManagedBy());
    assertEquals(sourceMetadata.getControlledResourceMetadata().getPrivateResourceUser(), clonedResourceMetadata.getControlledResourceMetadata().getPrivateResourceUser());
    assertEquals(CloudPlatform.GCP, clonedResourceMetadata.getCloudPlatform());
    final Storage destinationProjectStorageClient = ClientTestUtils.getGcpStorageClient(cloningUser, getDestinationProjectId());
    final Bucket destinationGcsBucket = destinationProjectStorageClient.get(destinationBucketName);
    // Location, storage class, and lifecycle rules should match values from createBucketAttempt
    assertEquals(StorageClass.STANDARD, destinationGcsBucket.getStorageClass());
    assertEquals(BUCKET_LOCATION, destinationGcsBucket.getLocation()); // default location, since none was specified
    assertEquals(2, destinationGcsBucket.getLifecycleRules().size());
    verifyClonedLifecycleRules(destinationGcsBucket);
    assertEquals(CloningInstructionsEnum.RESOURCE, clonedBucket.getEffectiveCloningInstructions());
    // test retrieving file from destination bucket
    Storage cloningUserStorageClient = ClientTestUtils.getGcpStorageClient(cloningUser, getDestinationProjectId());
    BlobId blobId = BlobId.of(destinationBucketName, GCS_BLOB_NAME);
    assertNotNull(blobId);
    final Blob retrievedFile = cloningUserStorageClient.get(blobId);
    assertNotNull(retrievedFile);
    assertEquals(blobId.getName(), retrievedFile.getBlobId().getName());
}
Also used: GcpGcsBucketResource (bio.terra.workspace.model.GcpGcsBucketResource), Blob (com.google.cloud.storage.Blob), CloneControlledGcpGcsBucketResult (bio.terra.workspace.model.CloneControlledGcpGcsBucketResult), Storage (com.google.cloud.storage.Storage), Bucket (com.google.cloud.storage.Bucket), CreatedControlledGcpGcsBucket (bio.terra.workspace.model.CreatedControlledGcpGcsBucket), ClonedControlledGcpGcsBucket (bio.terra.workspace.model.ClonedControlledGcpGcsBucket), CloneControlledGcpGcsBucketRequest (bio.terra.workspace.model.CloneControlledGcpGcsBucketRequest), JobControl (bio.terra.workspace.model.JobControl), ResourceMetadata (bio.terra.workspace.model.ResourceMetadata), BlobId (com.google.cloud.storage.BlobId)
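
The clone call above is asynchronous: the test submits the request and then repeatedly refetches the result with ClientTestUtils.pollWhileRunning until the job leaves the running state. A minimal, self-contained sketch of that polling pattern follows; the class name, the CheckedSupplier interface, and the isStillRunning predicate are illustrative assumptions, not the real ClientTestUtils API.

import java.time.Duration;
import java.util.function.Predicate;

public final class PollSketch {

    /** A fetch call that may throw a checked exception (the generated client methods do). */
    @FunctionalInterface
    public interface CheckedSupplier<T> {
        T get() throws Exception;
    }

    /** Re-fetches the result at a fixed interval until the predicate reports it is no longer running. */
    public static <T> T pollWhileRunning(
            T initialResult,
            CheckedSupplier<T> fetchLatest,
            Predicate<T> isStillRunning,
            Duration pollInterval) throws Exception {
        T result = initialResult;
        while (isStillRunning.test(result)) {
            Thread.sleep(pollInterval.toMillis());
            result = fetchLatest.get();
        }
        return result;
    }
}

In the test, CloneControlledGcpGcsBucketResult::getJobReport supplies the job report whose status the real utility presumably inspects to decide when to stop polling.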

Example 2 with ResourceMetadata

Use of bio.terra.workspace.model.ResourceMetadata in the project terra-workspace-manager by DataBiosphere.

From the class EnumerateResources, method logResult:

private void logResult(String tag, ResourceList resourceList) {
    List<ResourceDescription> descList = resourceList.getResources();
    logger.info("Enumeration results for {} - {} resources", tag, descList.size());
    for (ResourceDescription desc : descList) {
        ResourceMetadata metadata = desc.getMetadata();
        String access = "<null>";
        String managed = "<null>";
        String user = "<null>";
        if (metadata.getStewardshipType() == StewardshipType.CONTROLLED && metadata.getControlledResourceMetadata() != null) {
            ControlledResourceMetadata controlled = metadata.getControlledResourceMetadata();
            access = controlled.getAccessScope().toString();
            managed = controlled.getManagedBy().toString();
            user = controlled.getPrivateResourceUser().getUserName();
        }
        logger.info("  {}: id={} type={} stew={} cloud={} access={} managed={} user={}", metadata.getName(), metadata.getResourceId(), metadata.getResourceType(), metadata.getStewardshipType(), metadata.getCloudPlatform(), access, managed, user);
    }
}
Also used: ControlledResourceMetadata (bio.terra.workspace.model.ControlledResourceMetadata), ResourceDescription (bio.terra.workspace.model.ResourceDescription), Matchers.containsString (org.hamcrest.Matchers.containsString), ResourceMetadata (bio.terra.workspace.model.ResourceMetadata)
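
One thing to watch in logResult: controlled.getPrivateResourceUser().getUserName() is dereferenced without a null check, so a controlled resource with no private user would throw a NullPointerException. A null-safe sketch of the same extraction, using only getters that already appear in these examples (the class and method names are hypothetical):

import java.util.Optional;

import bio.terra.workspace.model.ControlledResourceMetadata;
import bio.terra.workspace.model.ResourceMetadata;

class NullSafeMetadataSketch {

    /** Returns the private-resource user name, or "<null>" if any level of the chain is absent. */
    static String privateUserOrNullMarker(ResourceMetadata metadata) {
        return Optional.ofNullable(metadata.getControlledResourceMetadata())
            .map(ControlledResourceMetadata::getPrivateResourceUser)
            .map(user -> user.getUserName())
            .orElse("<null>");
    }
}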

Example 3 with ResourceMetadata

Use of bio.terra.workspace.model.ResourceMetadata in the project terra-workspace-manager by DataBiosphere.

From the class EnumerateJobs, method doUserJourney:

@Override
public void doUserJourney(TestUserSpecification testUser, WorkspaceApi workspaceApi) throws Exception {
    // Case 1: fetch all
    EnumerateJobsResult fetchall = alpha1Api.enumerateJobs(getWorkspaceId(), null, null, null, null, null, null);
    logResult("fetchall", fetchall);
    // TODO: [PF-1281] we need another type of filtering to be able to do better validation of the
    // result return.
    // Case 2: fetch by pages
    String pageToken = null;
    for (int pageCount = 1; true; pageCount++) {
        EnumerateJobsResult page = alpha1Api.enumerateJobs(getWorkspaceId(), PAGE_SIZE, pageToken, null, null, null, null);
        logResult("page " + pageCount, page);
        assertThat("Not more than page size items returned", page.getResults().size(), lessThanOrEqualTo(PAGE_SIZE));
        pageToken = page.getPageToken();
        if (page.getResults().size() == 0) {
            break;
        }
    }
    // Case 4: filter by resource type
    EnumerateJobsResult buckets =
        alpha1Api.enumerateJobs(
            getWorkspaceId(),
            /*limit=*/ null,
            /*pageToken=*/ null,
            ResourceType.GCS_BUCKET,
            /*stewardship=*/ null,
            /*name=*/ null,
            /*jobState=*/ null);
    logResult("buckets", buckets);
    for (EnumeratedJob job : buckets.getResults()) {
        assertThat("Job is a bucket", job.getResourceType(), equalTo(ResourceType.GCS_BUCKET));
        assertNotNull(job.getResource().getGcpGcsBucket(), "Bucket resource present");
        assertThat("Resource is a bucket", job.getResource().getGcpGcsBucket().getMetadata().getResourceType(), equalTo(ResourceType.GCS_BUCKET));
    }
    // Case 5: filter by stewardship type
    EnumerateJobsResult controlled = alpha1Api.enumerateJobs(getWorkspaceId(), null, null, null, StewardshipType.CONTROLLED, null, null);
    logResult("controlled", controlled);
    for (EnumeratedJob job : controlled.getResults()) {
        ResourceMetadata metadata = getResourceMetadata(job);
        assertNotNull(metadata, "Resource has metadata");
        assertThat("Resource is controlled", metadata.getStewardshipType(), equalTo(StewardshipType.CONTROLLED));
    }
    // Case 6: filter by resource and stewardship
    EnumerateJobsResult controlledBuckets = alpha1Api.enumerateJobs(getWorkspaceId(), null, null, ResourceType.GCS_BUCKET, StewardshipType.CONTROLLED, null, null);
    logResult("controlledBuckets", controlledBuckets);
    for (EnumeratedJob job : controlledBuckets.getResults()) {
        ResourceMetadata metadata = getResourceMetadata(job);
        assertNotNull(metadata, "Resource has metadata");
        assertThat("Resource is controlled", metadata.getStewardshipType(), equalTo(StewardshipType.CONTROLLED));
        assertThat("Resource is a bucket", metadata.getResourceType(), equalTo(ResourceType.GCS_BUCKET));
    }
    // Case 7: validate error on invalid pagination params
    ApiException invalidPaginationException = assertThrows(ApiException.class, () -> alpha1Api.enumerateJobs(getWorkspaceId(), -5, null, null, null, null, null));
    assertThat(invalidPaginationException.getMessage(), containsString("Invalid pagination"));
    invalidPaginationException = assertThrows(ApiException.class, () -> alpha1Api.enumerateJobs(getWorkspaceId(), 22, "junktoken", null, null, null, null));
    assertThat(invalidPaginationException.getMessage(), containsString("Invalid page token"));
}
Also used: EnumerateJobsResult (bio.terra.workspace.model.EnumerateJobsResult), Matchers.containsString (org.hamcrest.Matchers.containsString), ResourceMetadata (bio.terra.workspace.model.ResourceMetadata), EnumeratedJob (bio.terra.workspace.model.EnumeratedJob), ApiException (bio.terra.workspace.client.ApiException)
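
The page loop in Case 2 treats an empty page as the end of the enumeration. A generic sketch of draining such a paged listing into one list follows; the PageFetcher interface and method names are hypothetical, and it assumes getResults() returns a java.util.List, as generated OpenAPI models usually do.

import java.util.ArrayList;
import java.util.List;

import bio.terra.workspace.model.EnumerateJobsResult;
import bio.terra.workspace.model.EnumeratedJob;

class JobPagingSketch {

    /** One page fetch, parameterized only by the page token. */
    @FunctionalInterface
    interface PageFetcher {
        EnumerateJobsResult fetch(String pageToken) throws Exception;
    }

    /** Drains every page into a single list; an empty page marks the end, as in Case 2 above. */
    static List<EnumeratedJob> fetchAllJobs(PageFetcher fetcher) throws Exception {
        List<EnumeratedJob> allJobs = new ArrayList<>();
        String pageToken = null;
        while (true) {
            EnumerateJobsResult page = fetcher.fetch(pageToken);
            if (page.getResults().size() == 0) {
                break;
            }
            allJobs.addAll(page.getResults());
            pageToken = page.getPageToken();
        }
        return allJobs;
    }
}

With the call shape from Case 2, the fetcher could be written as token -> alpha1Api.enumerateJobs(getWorkspaceId(), PAGE_SIZE, token, null, null, null, null).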

Example 4 with ResourceMetadata

Use of bio.terra.workspace.model.ResourceMetadata in the project terra-workspace-manager by DataBiosphere.

From the class ControlledBigQueryDatasetLifecycle, method testCloneBigQueryDataset:

private void testCloneBigQueryDataset(GcpBigQueryDatasetResource sourceDataset, TestUserSpecification cloningUser, ControlledGcpResourceApi cloningUserResourceApi) throws Exception {
    final String destinationDatasetName = ("clone_" + UUID.randomUUID().toString()).replace('-', '_');
    // clone the dataset as the cloning user
    final String clonedDatasetDescription = "Clone of " + destinationDatasetName;
    final String jobId = UUID.randomUUID().toString();
    final CloneControlledGcpBigQueryDatasetRequest cloneRequest =
        new CloneControlledGcpBigQueryDatasetRequest()
            .cloningInstructions(CloningInstructionsEnum.RESOURCE)
            .description(clonedDatasetDescription)
            .location(null) // null keeps the same location as the source dataset
            .destinationWorkspaceId(getDestinationWorkspaceId())
            .name("MyClonedDataset")
            .jobControl(new JobControl().id(jobId))
            .destinationDatasetName(null); // null keeps the same dataset name as the source
    final ResourceMetadata sourceDatasetMetadata = sourceDataset.getMetadata();
    logger.info("Cloning BigQuery dataset\n\tname: {}\n\tresource ID: {}\n\tworkspace: {}\n\t" + "projectID: {}\ninto destination \n\tname: {}\n\tworkspace: {}\n\tprojectID: {}", sourceDatasetMetadata.getName(), sourceDatasetMetadata.getResourceId(), sourceDatasetMetadata.getWorkspaceId(), getSourceProjectId(), sourceDatasetMetadata.getName(), getDestinationWorkspaceId(), getDestinationProjectId());
    // Submit clone request and poll for async result
    CloneControlledGcpBigQueryDatasetResult cloneResult = cloningUserResourceApi.cloneBigQueryDataset(cloneRequest, sourceDatasetMetadata.getWorkspaceId(), sourceDatasetMetadata.getResourceId());
    cloneResult = ClientTestUtils.pollWhileRunning(cloneResult, () -> cloningUserResourceApi.getCloneBigQueryDatasetResult(cloneRequest.getDestinationWorkspaceId(), cloneRequest.getJobControl().getId()), CloneControlledGcpBigQueryDatasetResult::getJobReport, Duration.ofSeconds(5));
    ClientTestUtils.assertJobSuccess("clone BigQuery dataset", cloneResult.getJobReport(), cloneResult.getErrorReport());
    assertEquals(sourceDatasetMetadata.getWorkspaceId(), cloneResult.getDataset().getSourceWorkspaceId());
    assertEquals(sourceDatasetMetadata.getResourceId(), cloneResult.getDataset().getSourceResourceId());
    // unwrap the result one layer at a time
    final ClonedControlledGcpBigQueryDataset clonedControlledGcpBigQueryDataset = cloneResult.getDataset();
    assertEquals(CloningInstructionsEnum.RESOURCE, clonedControlledGcpBigQueryDataset.getEffectiveCloningInstructions());
    final GcpBigQueryDatasetResource clonedResource = clonedControlledGcpBigQueryDataset.getDataset();
    final ResourceMetadata clonedDatasetMetadata = clonedResource.getMetadata();
    assertEquals(sourceDatasetMetadata.getCloningInstructions(), clonedDatasetMetadata.getCloningInstructions());
    assertEquals(sourceDatasetMetadata.getCloudPlatform(), clonedDatasetMetadata.getCloudPlatform());
    assertEquals(sourceDatasetMetadata.getResourceType(), clonedDatasetMetadata.getResourceType());
    assertEquals(sourceDatasetMetadata.getStewardshipType(), clonedDatasetMetadata.getStewardshipType());
    assertEquals(sourceDatasetMetadata.getControlledResourceMetadata().getManagedBy(), clonedDatasetMetadata.getControlledResourceMetadata().getManagedBy());
    assertEquals(sourceDatasetMetadata.getControlledResourceMetadata().getAccessScope(), clonedDatasetMetadata.getControlledResourceMetadata().getAccessScope());
    assertNotEquals(sourceDataset.getAttributes().getProjectId(), clonedResource.getAttributes().getProjectId());
    assertEquals(sourceDataset.getAttributes().getDatasetId(), clonedResource.getAttributes().getDatasetId());
    assertEquals(sourceDataset.getMetadata().getName(), sourceDataset.getAttributes().getDatasetId());
    // compare dataset contents
    final BigQuery bigQueryClient = ClientTestUtils.getGcpBigQueryClient(cloningUser, getDestinationProjectId());
    // Create an empty table to hold results
    TableId resultTableId = TableId.of(getDestinationProjectId(), clonedResource.getAttributes().getDatasetId(), "results_table");
    final QueryJobConfiguration employeeQueryJobConfiguration = QueryJobConfiguration.newBuilder("SELECT * FROM `" + getDestinationProjectId() + "." + clonedResource.getAttributes().getDatasetId() + ".employee`;").setDestinationTable(resultTableId).setWriteDisposition(WriteDisposition.WRITE_TRUNCATE).build();
    final TableResult employeeTableResult = bigQueryClient.query(employeeQueryJobConfiguration);
    final long numRows = StreamSupport.stream(employeeTableResult.getValues().spliterator(), false).count();
    assertThat(numRows, is(greaterThanOrEqualTo(2L)));
    final TableResult departmentTableResult = bigQueryClient.query(QueryJobConfiguration.newBuilder("SELECT * FROM `" + getDestinationProjectId() + "." + clonedResource.getAttributes().getDatasetId() + ".department` " + "WHERE department_id = 201;").setDestinationTable(resultTableId).setWriteDisposition(WriteDisposition.WRITE_TRUNCATE).build());
    final FieldValueList row = StreamSupport.stream(departmentTableResult.getValues().spliterator(), false).findFirst().orElseThrow(() -> new RuntimeException("Can't find expected result row"));
    // Assert data matches the expected values from BqDatasetUtils.populateBigQueryDataset
    final FieldValue nameFieldValue = row.get("name");
    assertEquals("ocean", nameFieldValue.getStringValue());
    final FieldValue managerFieldValue = row.get("manager_id");
    assertEquals(101, managerFieldValue.getLongValue());
}
Also used: TableId (com.google.cloud.bigquery.TableId), BigQuery (com.google.cloud.bigquery.BigQuery), JobControl (bio.terra.workspace.model.JobControl), CloneControlledGcpBigQueryDatasetResult (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetResult), ClonedControlledGcpBigQueryDataset (bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset), ResourceMetadata (bio.terra.workspace.model.ResourceMetadata), GcpBigQueryDatasetResource (bio.terra.workspace.model.GcpBigQueryDatasetResource), CloneControlledGcpBigQueryDatasetRequest (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetRequest), TableResult (com.google.cloud.bigquery.TableResult), FieldValueList (com.google.cloud.bigquery.FieldValueList), FieldValue (com.google.cloud.bigquery.FieldValue), QueryJobConfiguration (com.google.cloud.bigquery.QueryJobConfiguration)
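
Both content checks above follow the same query pattern: build a QueryJobConfiguration, aim it at a scratch destination table, and overwrite that table on every run. A compact sketch of that pattern, using only builder calls that appear in the example (the helper class, method name, and parameters are assumptions):

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableResult;

class BigQueryScratchQuerySketch {

    /** Runs the SQL and overwrites the scratch table with its results, as the assertions above do. */
    static TableResult queryIntoScratchTable(BigQuery client, String sql, TableId scratchTable)
            throws Exception {
        QueryJobConfiguration config =
            QueryJobConfiguration.newBuilder(sql)
                .setDestinationTable(scratchTable)
                .setWriteDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE)
                .build();
        return client.query(config);
    }
}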

Aggregations

ResourceMetadata (bio.terra.workspace.model.ResourceMetadata): 4 uses
JobControl (bio.terra.workspace.model.JobControl): 2 uses
Matchers.containsString (org.hamcrest.Matchers.containsString): 2 uses
ApiException (bio.terra.workspace.client.ApiException): 1 use
CloneControlledGcpBigQueryDatasetRequest (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetRequest): 1 use
CloneControlledGcpBigQueryDatasetResult (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetResult): 1 use
CloneControlledGcpGcsBucketRequest (bio.terra.workspace.model.CloneControlledGcpGcsBucketRequest): 1 use
CloneControlledGcpGcsBucketResult (bio.terra.workspace.model.CloneControlledGcpGcsBucketResult): 1 use
ClonedControlledGcpBigQueryDataset (bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset): 1 use
ClonedControlledGcpGcsBucket (bio.terra.workspace.model.ClonedControlledGcpGcsBucket): 1 use
ControlledResourceMetadata (bio.terra.workspace.model.ControlledResourceMetadata): 1 use
CreatedControlledGcpGcsBucket (bio.terra.workspace.model.CreatedControlledGcpGcsBucket): 1 use
EnumerateJobsResult (bio.terra.workspace.model.EnumerateJobsResult): 1 use
EnumeratedJob (bio.terra.workspace.model.EnumeratedJob): 1 use
GcpBigQueryDatasetResource (bio.terra.workspace.model.GcpBigQueryDatasetResource): 1 use
GcpGcsBucketResource (bio.terra.workspace.model.GcpGcsBucketResource): 1 use
ResourceDescription (bio.terra.workspace.model.ResourceDescription): 1 use
BigQuery (com.google.cloud.bigquery.BigQuery): 1 use
FieldValue (com.google.cloud.bigquery.FieldValue): 1 use
FieldValueList (com.google.cloud.bigquery.FieldValueList): 1 use
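
For reference, a small illustrative helper that condenses the ResourceMetadata reads common to the four examples into a single summary line; it relies only on getters shown above and is a sketch, not code from terra-workspace-manager.

import bio.terra.workspace.model.ResourceMetadata;

class ResourceMetadataSummarySketch {

    /** Formats the fields read throughout the examples above into one line. */
    static String summarize(ResourceMetadata metadata) {
        return String.format(
            "%s (id=%s) type=%s stewardship=%s cloud=%s cloning=%s",
            metadata.getName(),
            metadata.getResourceId(),
            metadata.getResourceType(),
            metadata.getStewardshipType(),
            metadata.getCloudPlatform(),
            metadata.getCloningInstructions());
    }
}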