
Example 1 with GcpBigQueryDatasetResource

Use of bio.terra.workspace.model.GcpBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

From the class DeleteWorkspaceWithControlledResource, method doUserJourney:

@Override
protected void doUserJourney(TestUserSpecification testUser, WorkspaceApi workspaceApi) throws Exception {
    ControlledGcpResourceApi resourceApi = ClientTestUtils.getControlledGcpResourceClient(testUser, server);
    // Create a cloud context
    String projectId = CloudContextMaker.createGcpCloudContext(getWorkspaceId(), workspaceApi);
    logger.info("Created project {}", projectId);
    // Create a shared BigQuery dataset
    GcpBigQueryDatasetResource createdDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(resourceApi, getWorkspaceId(), DATASET_RESOURCE_NAME, null, null);
    UUID resourceId = createdDataset.getMetadata().getResourceId();
    logger.info("Created controlled dataset {}", resourceId);
    // Confirm the dataset was created in WSM
    GcpBigQueryDatasetResource fetchedDataset = resourceApi.getBigQueryDataset(getWorkspaceId(), resourceId);
    assertEquals(createdDataset, fetchedDataset);
    // Delete the workspace, which should delete the included context and resource
    workspaceApi.deleteWorkspace(getWorkspaceId());
    // Confirm the workspace is deleted
    var workspaceMissingException = assertThrows(ApiException.class, () -> workspaceApi.getWorkspace(getWorkspaceId()));
    assertEquals(HttpStatus.SC_NOT_FOUND, workspaceMissingException.getCode());
    // Confirm the controlled resource was deleted
    var resourceMissingException = assertThrows(ApiException.class, () -> resourceApi.getBigQueryDataset(getWorkspaceId(), resourceId));
    assertEquals(HttpStatus.SC_NOT_FOUND, resourceMissingException.getCode());
}
Also used: ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) UUID(java.util.UUID) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource)
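The helper BqDatasetUtils.makeControlledBigQueryDatasetUserShared is defined outside this page. As a rough sketch of what such a helper plausibly wraps, the snippet below builds a create-dataset request against the same ControlledGcpResourceApi used above. The request/response type names (CreateControlledGcpBigQueryDatasetRequestBody, ControlledResourceCommonFields, GcpBigQueryDatasetCreationParameters) and the default values are assumptions based on the WSM client's naming conventions, not the actual helper; the fallback of the BigQuery datasetId to the resource name is inferred from the assertion in Example 2 below.

// Sketch only: the WSM model/type names below are assumptions, not the real BqDatasetUtils code.
import java.util.UUID;
import bio.terra.workspace.api.ControlledGcpResourceApi;
import bio.terra.workspace.model.*;

public class BqDatasetSketch {
  /** Create a user-shared controlled BigQuery dataset; null datasetId/cloningInstructions fall back to defaults. */
  public static GcpBigQueryDatasetResource makeControlledBigQueryDatasetUserShared(
      ControlledGcpResourceApi resourceApi,
      UUID workspaceId,
      String resourceName,
      String datasetId,
      CloningInstructionsEnum cloningInstructions) throws Exception {
    var common = new ControlledResourceCommonFields()
        .name(resourceName)
        .accessScope(AccessScope.SHARED_ACCESS)
        .managedBy(ManagedBy.USER)
        .cloningInstructions(cloningInstructions == null ? CloningInstructionsEnum.NOTHING : cloningInstructions);
    var params = new GcpBigQueryDatasetCreationParameters()
        // Default the BigQuery datasetId to the resource name when none is supplied (assumed behavior).
        .datasetId(datasetId == null ? resourceName : datasetId);
    var body = new CreateControlledGcpBigQueryDatasetRequestBody().common(common).dataset(params);
    return resourceApi.createBigQueryDataset(body, workspaceId).getBigQueryDataset();
  }
}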

Example 2 with GcpBigQueryDatasetResource

Use of bio.terra.workspace.model.GcpBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

From the class ControlledBigQueryDatasetLifecycle, method doUserJourney:

@Override
protected void doUserJourney(TestUserSpecification testUser, WorkspaceApi workspaceApi) throws Exception {
    ControlledGcpResourceApi ownerResourceApi = ClientTestUtils.getControlledGcpResourceClient(testUser, server);
    // Add a writer to the source workspace. The reader is already added by the base class.
    logger.info("Adding {} as writer to workspace {}", writer.userEmail, getWorkspaceId());
    workspaceApi.grantRole(new GrantRoleRequestBody().memberEmail(writer.userEmail), getWorkspaceId(), IamRole.WRITER);
    SamClientUtils.dumpResourcePolicy(testUser, server, "workspace", getWorkspaceId().toString());
    // Create a shared BigQuery dataset
    GcpBigQueryDatasetResource createdDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(
        ownerResourceApi, getWorkspaceId(), DATASET_RESOURCE_NAME, /*datasetId=*/ null, /*cloningInstructions=*/ null);
    assertEquals(DATASET_RESOURCE_NAME, createdDataset.getAttributes().getDatasetId());
    UUID resourceId = createdDataset.getMetadata().getResourceId();
    // Retrieve the dataset resource
    logger.info("Retrieving dataset resource id {}", resourceId.toString());
    GcpBigQueryDatasetResource fetchedResource = ownerResourceApi.getBigQueryDataset(getWorkspaceId(), resourceId);
    assertEquals(createdDataset, fetchedResource);
    assertEquals(DATASET_RESOURCE_NAME, fetchedResource.getAttributes().getDatasetId());
    createControlledDatasetWithBothResourceNameAndDatasetIdSpecified(ownerResourceApi);
    BigQuery ownerBqClient = ClientTestUtils.getGcpBigQueryClient(testUser, getSourceProjectId());
    BigQuery writerBqClient = ClientTestUtils.getGcpBigQueryClient(writer, getSourceProjectId());
    BigQuery readerBqClient = ClientTestUtils.getGcpBigQueryClient(getWorkspaceReader(), getSourceProjectId());
    // Workspace owner can create a table in this dataset
    Table table = createTable(ownerBqClient, getSourceProjectId());
    String tableName = table.getTableId().getTable();
    // Workspace reader can read the table
    // This is the reader's first use of cloud APIs after being added to the workspace, so we
    // retry this operation until cloud IAM has properly synced.
    var readTable = ClientTestUtils.getWithRetryOnException(() -> readerBqClient.getTable(table.getTableId()));
    assertEquals(table, readTable);
    logger.info("Read table {} as workspace reader", tableName);
    // Workspace reader cannot modify tables
    Table readerUpdatedTable = table.toBuilder().setDescription("A new table description").build();
    assertThrows(BigQueryException.class, () -> readerBqClient.update(readerUpdatedTable), "Workspace reader was able to modify table metadata");
    logger.info("Workspace reader could not modify table {} metadata as expected", tableName);
    // Workspace reader cannot write data to tables
    assertThrows(BigQueryException.class, () -> insertValueIntoTable(readerBqClient, "some value"), "Workspace reader was able to insert data into a table");
    logger.info("Workspace reader could not modify table {} contents as expected", tableName);
    // Workspace writer can also read the table
    // This is the writer's first use of cloud APIs after being added to the workspace, so we
    // retry this operation until cloud IAM has properly synced.
    var writerReadTable = ClientTestUtils.getWithRetryOnException(() -> writerBqClient.getTable(table.getTableId()));
    assertEquals(table, writerReadTable);
    logger.info("Read table {} as workspace writer", tableName);
    // In contrast, a workspace writer can write data to tables
    String columnValue = "this value lives in a table";
    insertValueIntoTable(writerBqClient, columnValue);
    logger.info("Workspace writer wrote a row to table {}", tableName);
    // Create a dataset to hold query results in the destination project.
    ControlledGcpResourceApi readerResourceApi = ClientTestUtils.getControlledGcpResourceClient(getWorkspaceReader(), server);
    String resultDatasetId = "temporary_result_dataset";
    GcpBigQueryDatasetResource temporaryResultDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(readerResourceApi, getDestinationWorkspaceId(), "temporary_result_resource", resultDatasetId, CloningInstructionsEnum.NOTHING);
    // The table does not exist yet, but will be created to hold query results.
    TableId resultTableId = TableId.of(getDestinationProjectId(), resultDatasetId, BqDatasetUtils.BQ_RESULT_TABLE_NAME);
    // Workspace reader can now read the row inserted above
    assertEquals(columnValue, readValueFromTable(readerBqClient, resultTableId));
    logger.info("Workspace reader read that row from table {}", tableName);
    // Workspace writer can update the table metadata
    String newDescription = "Another new table description";
    Table writerUpdatedTable = table.toBuilder().setDescription(newDescription).build();
    Table updatedTable = writerBqClient.update(writerUpdatedTable);
    assertEquals(newDescription, updatedTable.getDescription());
    logger.info("Workspace writer modified table {} metadata", tableName);
    // Workspace owner can update the dataset resource through WSM
    String resourceDescription = "a description for WSM";
    Integer defaultTableLifetimeSec = 5400;
    var updateDatasetRequest = new UpdateControlledGcpBigQueryDatasetRequestBody().description(resourceDescription).updateParameters(new GcpBigQueryDatasetUpdateParameters().defaultTableLifetime(defaultTableLifetimeSec));
    ownerResourceApi.updateBigQueryDataset(updateDatasetRequest, getWorkspaceId(), resourceId);
    var datasetAfterUpdate = ownerResourceApi.getBigQueryDataset(getWorkspaceId(), resourceId);
    assertEquals(resourceDescription, datasetAfterUpdate.getMetadata().getDescription());
    logger.info("Workspace owner updated resource {}", resourceId);
    // However, invalid updates are rejected.
    String invalidName = "!!!invalid_name!!!";
    var invalidUpdateDatasetRequest = new UpdateControlledGcpBigQueryDatasetRequestBody().name(invalidName);
    ApiException invalidUpdateEx = assertThrows(ApiException.class, () -> ownerResourceApi.updateBigQueryDataset(invalidUpdateDatasetRequest, getWorkspaceId(), resourceId));
    assertEquals(HttpStatusCodes.STATUS_CODE_BAD_REQUEST, invalidUpdateEx.getCode());
    // Cloud metadata matches the updated values
    Dataset cloudDataset = ownerBqClient.getDataset(DatasetId.of(getSourceProjectId(), DATASET_RESOURCE_NAME));
    assertEquals(defaultTableLifetimeSec * 1000L, cloudDataset.getDefaultTableLifetime());
    assertNull(cloudDataset.getDefaultPartitionExpirationMs());
    // Workspace writer can delete the table we created earlier
    logger.info("Deleting table {} from dataset {}", table.getTableId().getTable(), DATASET_RESOURCE_NAME);
    assertTrue(writerBqClient.delete(TableId.of(getSourceProjectId(), DATASET_RESOURCE_NAME, table.getTableId().getTable())));
    // Workspace reader can clean up the results table and dataset before cloning
    readerResourceApi.deleteBigQueryDataset(getDestinationWorkspaceId(), temporaryResultDataset.getMetadata().getResourceId());
    // Populate dataset with additional tables to verify cloning behavior
    BqDatasetUtils.populateBigQueryDataset(createdDataset, testUser, getSourceProjectId());
    // Verify workspace reader is able to clone the resource they can read
    testCloneBigQueryDataset(createdDataset, getWorkspaceReader(), readerResourceApi);
    // The reader should be able to enumerate the dataset.
    ResourceApi readerApi = ClientTestUtils.getResourceClient(getWorkspaceReader(), server);
    ResourceList datasetList = readerApi.enumerateResources(getWorkspaceId(), 0, 5, ResourceType.BIG_QUERY_DATASET, StewardshipType.CONTROLLED);
    assertEquals(1, datasetList.getResources().size());
    MultiResourcesUtils.assertResourceType(ResourceType.BIG_QUERY_DATASET, datasetList);
    // Workspace writer cannot delete the dataset directly
    var writerCannotDeleteException = assertThrows(BigQueryException.class, () -> writerBqClient.delete(DATASET_RESOURCE_NAME));
    assertEquals(HttpStatusCodes.STATUS_CODE_FORBIDDEN, writerCannotDeleteException.getCode());
    // Workspace owner cannot delete the dataset directly
    var ownerCannotDeleteException = assertThrows(BigQueryException.class, () -> ownerBqClient.delete(DATASET_RESOURCE_NAME));
    assertEquals(HttpStatusCodes.STATUS_CODE_FORBIDDEN, ownerCannotDeleteException.getCode());
    // Workspace owner can delete the dataset through WSM
    ownerResourceApi.deleteBigQueryDataset(getWorkspaceId(), resourceId);
}
Also used: TableId(com.google.cloud.bigquery.TableId) GrantRoleRequestBody(bio.terra.workspace.model.GrantRoleRequestBody) BigQuery(com.google.cloud.bigquery.BigQuery) Table(com.google.cloud.bigquery.Table) ClonedControlledGcpBigQueryDataset(bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset) Dataset(com.google.cloud.bigquery.Dataset) GcpBigQueryDatasetUpdateParameters(bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource) UpdateControlledGcpBigQueryDatasetRequestBody(bio.terra.workspace.model.UpdateControlledGcpBigQueryDatasetRequestBody) ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) ResourceApi(bio.terra.workspace.api.ResourceApi) ResourceList(bio.terra.workspace.model.ResourceList) UUID(java.util.UUID) ApiException(bio.terra.workspace.client.ApiException)
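The table helpers used in this journey (createTable, insertValueIntoTable, readValueFromTable) are defined elsewhere in the test class; the real ones pick up the project, dataset, and table from test state, so the sketch below passes them explicitly. It shows roughly what such helpers could look like with the google-cloud-bigquery client; the table name, the single STRING column, and the SQL strings are illustrative assumptions, not the test's actual schema.

// Sketch only: table/column names and SQL are assumptions, not the real test helpers.
import com.google.cloud.bigquery.*;

class BigQueryTableHelpersSketch {
  private static final String TABLE_NAME = "sketch_table";  // assumed name
  private static final String COLUMN_NAME = "value";        // assumed single STRING column

  /** Create an empty table with one STRING column in the controlled dataset. */
  static Table createTable(BigQuery bigquery, String projectId, String datasetId) {
    Schema schema = Schema.of(Field.of(COLUMN_NAME, StandardSQLTypeName.STRING));
    TableId tableId = TableId.of(projectId, datasetId, TABLE_NAME);
    return bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
  }

  /** Insert a single row via a DML query. The journey above expects this to throw BigQueryException for a workspace reader. */
  static void insertValueIntoTable(BigQuery bigquery, String projectId, String datasetId, String value)
      throws InterruptedException {
    String sql = String.format(
        "INSERT INTO `%s.%s.%s` (%s) VALUES (@v)", projectId, datasetId, TABLE_NAME, COLUMN_NAME);
    QueryJobConfiguration config = QueryJobConfiguration.newBuilder(sql)
        .addNamedParameter("v", QueryParameterValue.string(value))
        .build();
    bigquery.query(config);
  }

  /** Read the column back, writing query results into the caller-supplied destination table. */
  static String readValueFromTable(BigQuery bigquery, String projectId, String datasetId, TableId resultTable)
      throws InterruptedException {
    String sql = String.format("SELECT %s FROM `%s.%s.%s`", COLUMN_NAME, projectId, datasetId, TABLE_NAME);
    QueryJobConfiguration config = QueryJobConfiguration.newBuilder(sql)
        .setDestinationTable(resultTable)  // matches the resultTableId usage in the journey above
        .build();
    TableResult result = bigquery.query(config);
    // Return the first value from the first row.
    return result.iterateAll().iterator().next().get(0).getStringValue();
  }
}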

Example 3 with GcpBigQueryDatasetResource

Use of bio.terra.workspace.model.GcpBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

From the class DeleteGcpContextWithControlledResource, method doUserJourney:

@Override
protected void doUserJourney(TestUserSpecification testUser, WorkspaceApi workspaceApi) throws Exception {
    ControlledGcpResourceApi controlledResourceApi = ClientTestUtils.getControlledGcpResourceClient(testUser, server);
    ReferencedGcpResourceApi referencedResourceApi = ClientTestUtils.getReferencedGcpResourceClient(testUser, server);
    // Create a cloud context
    String projectId = CloudContextMaker.createGcpCloudContext(getWorkspaceId(), workspaceApi);
    logger.info("Created project {}", projectId);
    // Create a controlled BigQuery dataset
    GcpBigQueryDatasetResource controlledDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(controlledResourceApi, getWorkspaceId(), DATASET_RESOURCE_NAME, null, null);
    UUID controlledResourceId = controlledDataset.getMetadata().getResourceId();
    logger.info("Created controlled dataset {}", controlledResourceId);
    // Confirm the dataset was created in WSM
    GcpBigQueryDatasetResource fetchedControlledDataset = controlledResourceApi.getBigQueryDataset(getWorkspaceId(), controlledResourceId);
    assertEquals(controlledDataset, fetchedControlledDataset);
    // Create a reference to the controlled resource we just created
    String referenceName = "my-resource-name-" + UUID.randomUUID().toString();
    GcpBigQueryDatasetResource referencedDataset = BqDatasetUtils.makeBigQueryDatasetReference(controlledDataset.getAttributes(), referencedResourceApi, getWorkspaceId(), referenceName);
    // Confirm the reference was created in WSM
    GcpBigQueryDatasetResource fetchedDatasetReference = referencedResourceApi.getBigQueryDatasetReference(getWorkspaceId(), referencedDataset.getMetadata().getResourceId());
    assertEquals(referencedDataset, fetchedDatasetReference);
    // Delete the context, which should delete the controlled resource but not the reference.
    CloudContextMaker.deleteGcpCloudContext(getWorkspaceId(), workspaceApi);
    // Confirm the controlled resource was deleted.
    var noGcpContextException = assertThrows(ApiException.class, () -> controlledResourceApi.getBigQueryDataset(getWorkspaceId(), controlledResourceId));
    assertEquals(HttpStatus.SC_NOT_FOUND, noGcpContextException.getCode());
    // Confirm the referenced resource was not deleted (even though the underlying cloud resource
    // was).
    GcpBigQueryDatasetResource datasetReferenceAfterDelete = referencedResourceApi.getBigQueryDatasetReference(getWorkspaceId(), referencedDataset.getMetadata().getResourceId());
    assertEquals(referencedDataset, datasetReferenceAfterDelete);
}
Also used: ReferencedGcpResourceApi(bio.terra.workspace.api.ReferencedGcpResourceApi) ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) UUID(java.util.UUID) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource)
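BqDatasetUtils.makeBigQueryDatasetReference is also defined outside this page. As a hedged sketch, it plausibly copies the controlled dataset's projectId/datasetId attributes into a create-reference request on the ReferencedGcpResourceApi; the request type and field names below (CreateGcpBigQueryDatasetReferenceRequestBody, ReferenceResourceCommonFields) are assumptions drawn from the WSM client's naming pattern, not the actual helper.

// Sketch only: the WSM reference-request type names are assumptions, not the real BqDatasetUtils code.
import java.util.UUID;
import bio.terra.workspace.api.ReferencedGcpResourceApi;
import bio.terra.workspace.model.*;

class BqDatasetReferenceSketch {
  /** Create a reference pointing at an existing BigQuery dataset, e.g. a controlled dataset's attributes. */
  static GcpBigQueryDatasetResource makeBigQueryDatasetReference(
      GcpBigQueryDatasetAttributes attributes,
      ReferencedGcpResourceApi referencedApi,
      UUID workspaceId,
      String referenceName) throws Exception {
    var body = new CreateGcpBigQueryDatasetReferenceRequestBody()
        .metadata(new ReferenceResourceCommonFields()
            .name(referenceName)
            .cloningInstructions(CloningInstructionsEnum.NOTHING))
        .dataset(new GcpBigQueryDatasetAttributes()
            .projectId(attributes.getProjectId())
            .datasetId(attributes.getDatasetId()));
    return referencedApi.createBigQueryDatasetReference(body, workspaceId);
  }
}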

Example 4 with GcpBigQueryDatasetResource

Use of bio.terra.workspace.model.GcpBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

From the class ReferencedBigQueryResourceLifecycle, method testUpdateReferences:

private void testUpdateReferences(GcpBigQueryDatasetResource dataset, GcpBigQueryDataTableResource table, ReferencedGcpResourceApi fullAccessApi) throws Exception {
    ReferencedGcpResourceApi partialAccessApi = ClientTestUtils.getReferencedGcpResourceClient(partialAccessUser, server);
    ResourceApi partialAccessResourceApi = ClientTestUtils.getResourceClient(partialAccessUser, server);
    UUID bqDatasetResourceId = dataset.getMetadata().getResourceId();
    UUID bqTableResourceId = table.getMetadata().getResourceId();
    // Update BQ dataset's name and description
    String newDatasetName = "newDatasetName";
    String newDatasetDescription = "newDescription";
    BqDatasetUtils.updateBigQueryDatasetReference(fullAccessApi, getWorkspaceId(), bqDatasetResourceId,
        newDatasetName, newDatasetDescription, /*projectId=*/ null, /*datasetId=*/ null);
    GcpBigQueryDatasetResource datasetReferenceFirstUpdate = fullAccessApi.getBigQueryDatasetReference(getWorkspaceId(), bqDatasetResourceId);
    assertEquals(newDatasetName, datasetReferenceFirstUpdate.getMetadata().getName());
    assertEquals(newDatasetDescription, datasetReferenceFirstUpdate.getMetadata().getDescription());
    assertEquals(referencedBqTableAttributes.getDatasetId(), datasetReferenceFirstUpdate.getAttributes().getDatasetId());
    assertEquals(referencedBqTableAttributes.getProjectId(), datasetReferenceFirstUpdate.getAttributes().getProjectId());
    // {@code userWithPartialAccess} does not have access to the original dataset.
    assertFalse(partialAccessResourceApi.checkReferenceAccess(getWorkspaceId(), bqDatasetResourceId));
    // Update BQ dataset's referencing target
    // Attempt to update the referencing target but {@code userWithPartialAccess} does not have
    // access to the original dataset.
    assertThrows(ApiException.class, () -> BqDatasetUtils.updateBigQueryDatasetReference(partialAccessApi, getWorkspaceId(),
        bqDatasetResourceId, /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        bqTableFromAlternateDatasetAttributes.getDatasetId()));
    BqDatasetUtils.updateBigQueryDatasetReference(fullAccessApi, getWorkspaceId(), bqDatasetResourceId,
        /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        bqTableFromAlternateDatasetAttributes.getDatasetId());
    GcpBigQueryDatasetResource datasetReferenceSecondUpdate = fullAccessApi.getBigQueryDatasetReference(getWorkspaceId(), bqDatasetResourceId);
    assertEquals(newDatasetName, datasetReferenceSecondUpdate.getMetadata().getName());
    assertEquals(newDatasetDescription, datasetReferenceSecondUpdate.getMetadata().getDescription());
    assertEquals(table.getAttributes().getProjectId(), datasetReferenceSecondUpdate.getAttributes().getProjectId());
    assertEquals(bqTableFromAlternateDatasetAttributes.getDatasetId(), datasetReferenceSecondUpdate.getAttributes().getDatasetId());
    // {@code userWithPartialAccess} has access to dataset 2. Since the reference now points to
    // dataset 2, the user also has access to this reference.
    assertTrue(partialAccessResourceApi.checkReferenceAccess(getWorkspaceId(), bqDatasetResourceId));
    // Update BQ data table's name and description.
    String newDataTableName = "newDataTableName";
    String newDataTableDescription = "a new description to the new data table reference";
    BqDataTableUtils.updateBigQueryDataTableReference(fullAccessApi, getWorkspaceId(), bqTableResourceId,
        newDataTableName, newDataTableDescription, /*projectId=*/ null, /*datasetId=*/ null, /*tableId=*/ null);
    GcpBigQueryDataTableResource dataTableReferenceFirstUpdate = fullAccessApi.getBigQueryDataTableReference(getWorkspaceId(), bqTableResourceId);
    assertEquals(newDataTableName, dataTableReferenceFirstUpdate.getMetadata().getName());
    assertEquals(newDataTableDescription, dataTableReferenceFirstUpdate.getMetadata().getDescription());
    assertEquals(table.getAttributes().getProjectId(), dataTableReferenceFirstUpdate.getAttributes().getProjectId());
    assertEquals(table.getAttributes().getDatasetId(), dataTableReferenceFirstUpdate.getAttributes().getDatasetId());
    assertEquals(table.getAttributes().getDataTableId(), dataTableReferenceFirstUpdate.getAttributes().getDataTableId());
    // Update the BQ data table's referencing target.
    // Attempt to update the BQ data table reference; {@code userWithPartialAccess} does not have
    // access to BQ table 2, so this fails.
    assertThrows(ApiException.class, () -> BqDataTableUtils.updateBigQueryDataTableReference(partialAccessApi, getWorkspaceId(),
        bqTableResourceId, /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        bqTableFromAlternateDatasetAttributes.getDatasetId(), bqTableFromAlternateDatasetAttributes.getDataTableId()));
    // Successfully update the referencing target because {@code userWithFullAccess} has access to
    // BQ table 2.
    BqDataTableUtils.updateBigQueryDataTableReference(fullAccessApi, getWorkspaceId(), bqTableResourceId,
        /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        bqTableFromAlternateDatasetAttributes.getDatasetId(), bqTableFromAlternateDatasetAttributes.getDataTableId());
    GcpBigQueryDataTableResource dataTableReferenceSecondUpdate = fullAccessApi.getBigQueryDataTableReference(getWorkspaceId(), bqTableResourceId);
    assertEquals(newDataTableName, dataTableReferenceSecondUpdate.getMetadata().getName());
    assertEquals(newDataTableDescription, dataTableReferenceSecondUpdate.getMetadata().getDescription());
    assertEquals(table.getAttributes().getProjectId(), dataTableReferenceSecondUpdate.getAttributes().getProjectId());
    assertEquals(bqTableFromAlternateDatasetAttributes.getDatasetId(), dataTableReferenceSecondUpdate.getAttributes().getDatasetId());
    assertEquals(bqTableFromAlternateDatasetAttributes.getDataTableId(), dataTableReferenceSecondUpdate.getAttributes().getDataTableId());
    BqDataTableUtils.updateBigQueryDataTableReference(fullAccessApi, getWorkspaceId(), bqTableResourceId,
        /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        table.getAttributes().getDatasetId(), /*tableId=*/ null);
    GcpBigQueryDataTableResource dataTableReferenceThirdUpdate = fullAccessApi.getBigQueryDataTableReference(getWorkspaceId(), bqTableResourceId);
    assertEquals(newDataTableName, dataTableReferenceThirdUpdate.getMetadata().getName());
    assertEquals(newDataTableDescription, dataTableReferenceThirdUpdate.getMetadata().getDescription());
    assertEquals(table.getAttributes().getProjectId(), dataTableReferenceThirdUpdate.getAttributes().getProjectId());
    assertEquals(table.getAttributes().getDatasetId(), dataTableReferenceThirdUpdate.getAttributes().getDatasetId());
    assertEquals(bqTableFromAlternateDatasetAttributes.getDataTableId(), dataTableReferenceThirdUpdate.getAttributes().getDataTableId());
    BqDataTableUtils.updateBigQueryDataTableReference(fullAccessApi, getWorkspaceId(), bqTableResourceId,
        /*name=*/ null, /*description=*/ null, /*projectId=*/ null,
        /*datasetId=*/ null, table.getAttributes().getDataTableId());
    GcpBigQueryDataTableResource dataTableReferenceFourthUpdate = fullAccessApi.getBigQueryDataTableReference(getWorkspaceId(), bqTableResourceId);
    assertEquals(newDataTableName, dataTableReferenceFourthUpdate.getMetadata().getName());
    assertEquals(newDataTableDescription, dataTableReferenceFourthUpdate.getMetadata().getDescription());
    assertEquals(table.getAttributes().getProjectId(), dataTableReferenceFourthUpdate.getAttributes().getProjectId());
    assertEquals(table.getAttributes().getDatasetId(), dataTableReferenceFourthUpdate.getAttributes().getDatasetId());
    assertEquals(table.getAttributes().getDataTableId(), dataTableReferenceFourthUpdate.getAttributes().getDataTableId());
}
Also used: ReferencedGcpResourceApi(bio.terra.workspace.api.ReferencedGcpResourceApi) ResourceApi(bio.terra.workspace.api.ResourceApi) GcpBigQueryDataTableResource(bio.terra.workspace.model.GcpBigQueryDataTableResource) UUID(java.util.UUID) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource)

Example 5 with GcpBigQueryDatasetResource

Use of bio.terra.workspace.model.GcpBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

From the class ReferencedBigQueryResourceLifecycle, method testGetReferences:

private void testGetReferences(GcpBigQueryDatasetResource referencedDataset, GcpBigQueryDataTableResource referencedDataTable, ReferencedGcpResourceApi referencedGcpResourceApi) throws Exception {
    // Get the references
    GcpBigQueryDatasetResource fetchedDataset = referencedGcpResourceApi.getBigQueryDatasetReference(getWorkspaceId(), bqDatasetResourceId);
    assertEquals(referencedDataset, fetchedDataset);
    GcpBigQueryDataTableResource fetchedDataTable = referencedGcpResourceApi.getBigQueryDataTableReference(getWorkspaceId(), bqDataTableResourceId);
    assertEquals(referencedDataTable, fetchedDataTable);
    // Enumerate the references
    // Any workspace member can view references in WSM, even if they can't view the underlying cloud
    // resource or contents.
    ResourceApi noAccessApi = ClientTestUtils.getResourceClient(noAccessUser, server);
    ResourceList referenceList = noAccessApi.enumerateResources(getWorkspaceId(), 0, 5,
        /*referenceType=*/ null, StewardshipType.REFERENCED);
    assertEquals(2, referenceList.getResources().size());
    ResourceList datasetList = noAccessApi.enumerateResources(getWorkspaceId(), 0, 5,
        /*referenceType=*/ ResourceType.BIG_QUERY_DATASET, StewardshipType.REFERENCED);
    assertEquals(1, datasetList.getResources().size());
    MultiResourcesUtils.assertResourceType(ResourceType.BIG_QUERY_DATASET, datasetList);
    ResourceList tableList = noAccessApi.enumerateResources(getWorkspaceId(), 0, 5,
        /*referenceType=*/ ResourceType.BIG_QUERY_DATA_TABLE, StewardshipType.REFERENCED);
    assertEquals(1, tableList.getResources().size());
    MultiResourcesUtils.assertResourceType(ResourceType.BIG_QUERY_DATA_TABLE, tableList);
}
Also used: ReferencedGcpResourceApi(bio.terra.workspace.api.ReferencedGcpResourceApi) ResourceApi(bio.terra.workspace.api.ResourceApi) ResourceList(bio.terra.workspace.model.ResourceList) GcpBigQueryDataTableResource(bio.terra.workspace.model.GcpBigQueryDataTableResource) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource)
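The enumerateResources calls above fetch at most 5 items per call (offset 0, limit 5). Building on that same signature, the snippet below sketches how a caller might page through every referenced resource in a workspace; the page size, the counting loop, and the assumption that a short page signals the end of the listing are illustrative choices, not taken from the test or the API documentation.

// Sketch only: paging over the enumerateResources(workspaceId, offset, limit, resourceType, stewardshipType)
// signature used in the examples above.
import java.util.UUID;
import bio.terra.workspace.api.ResourceApi;
import bio.terra.workspace.model.ResourceList;
import bio.terra.workspace.model.StewardshipType;

class ResourcePagingSketch {
  /** Count all referenced resources in a workspace by paging through enumerateResources. */
  static int countReferencedResources(ResourceApi resourceApi, UUID workspaceId, int pageSize) throws Exception {
    int offset = 0;
    int total = 0;
    while (true) {
      ResourceList page = resourceApi.enumerateResources(workspaceId, offset, pageSize,
          /*referenceType=*/ null, StewardshipType.REFERENCED);
      total += page.getResources().size();
      // A short page means we reached the end of the listing (assumed paging behavior).
      if (page.getResources().size() < pageSize) {
        return total;
      }
      offset += pageSize;
    }
  }
}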

Aggregations

GcpBigQueryDatasetResource (bio.terra.workspace.model.GcpBigQueryDatasetResource) 11
ReferencedGcpResourceApi (bio.terra.workspace.api.ReferencedGcpResourceApi) 4
ResourceApi (bio.terra.workspace.api.ResourceApi) 4
UUID (java.util.UUID) 4
ControlledGcpResourceApi (bio.terra.workspace.api.ControlledGcpResourceApi) 3
GcpBigQueryDataTableResource (bio.terra.workspace.model.GcpBigQueryDataTableResource) 3
ResourceList (bio.terra.workspace.model.ResourceList) 3
PDBqDataset (bio.terra.cli.serialization.persisted.resource.PDBqDataset) 2
UFBqDataset (bio.terra.cli.serialization.userfacing.resource.UFBqDataset) 2
ClonedControlledGcpBigQueryDataset (bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset) 2
GrantRoleRequestBody (bio.terra.workspace.model.GrantRoleRequestBody) 2
BigQuery (com.google.cloud.bigquery.BigQuery) 2
TableId (com.google.cloud.bigquery.TableId) 2
ApiException (bio.terra.workspace.client.ApiException) 1
CloneControlledGcpBigQueryDatasetRequest (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetRequest) 1
CloneControlledGcpBigQueryDatasetResult (bio.terra.workspace.model.CloneControlledGcpBigQueryDatasetResult) 1
GcpAiNotebookInstanceResource (bio.terra.workspace.model.GcpAiNotebookInstanceResource) 1
GcpBigQueryDatasetUpdateParameters (bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters) 1
GcpGcsBucketResource (bio.terra.workspace.model.GcpGcsBucketResource) 1
JobControl (bio.terra.workspace.model.JobControl) 1