Search in sources :

Example 1 with GcpBigQueryDatasetUpdateParameters

Use of bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters in the project terra-workspace-manager by DataBiosphere.

In the class ControlledBigQueryDatasetLifecycle, the method doUserJourney:

/**
 * Exercises the full lifecycle of a controlled BigQuery dataset: creation via WSM,
 * retrieval, role-based access checks (owner vs. writer vs. reader, both through the
 * BigQuery cloud API and through WSM), resource update/validation, cloning,
 * enumeration, and finally deletion through WSM.
 *
 * @param testUser the test user acting as workspace owner for privileged operations
 * @param workspaceApi API client used to grant roles on the source workspace
 * @throws Exception on any failed API call or assertion
 */
@Override
protected void doUserJourney(TestUserSpecification testUser, WorkspaceApi workspaceApi) throws Exception {
    ControlledGcpResourceApi ownerResourceApi = ClientTestUtils.getControlledGcpResourceClient(testUser, server);
    // Add a writer to the source workspace. Reader is already added by the base class.
    logger.info("Adding {} as writer to workspace {}", writer.userEmail, getWorkspaceId());
    workspaceApi.grantRole(new GrantRoleRequestBody().memberEmail(writer.userEmail), getWorkspaceId(), IamRole.WRITER);
    // Dump the Sam policy for debugging in case IAM sync issues show up later.
    SamClientUtils.dumpResourcePolicy(testUser, server, "workspace", getWorkspaceId().toString());
    // Create a shared BigQuery dataset; null datasetId/cloningInstructions take the defaults.
    GcpBigQueryDatasetResource createdDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(ownerResourceApi, getWorkspaceId(), DATASET_RESOURCE_NAME, /*datasetId=*/
    null, /*cloningInstructions=*/
    null);
    // With a null datasetId, the resource name is also used as the cloud dataset id.
    assertEquals(DATASET_RESOURCE_NAME, createdDataset.getAttributes().getDatasetId());
    UUID resourceId = createdDataset.getMetadata().getResourceId();
    // Retrieve the dataset resource and confirm it round-trips unchanged.
    logger.info("Retrieving dataset resource id {}", resourceId.toString());
    GcpBigQueryDatasetResource fetchedResource = ownerResourceApi.getBigQueryDataset(getWorkspaceId(), resourceId);
    assertEquals(createdDataset, fetchedResource);
    assertEquals(DATASET_RESOURCE_NAME, fetchedResource.getAttributes().getDatasetId());
    createControlledDatasetWithBothResourceNameAndDatasetIdSpecified(ownerResourceApi);
    // Direct BigQuery clients for each role, all pointed at the source project.
    BigQuery ownerBqClient = ClientTestUtils.getGcpBigQueryClient(testUser, getSourceProjectId());
    BigQuery writerBqClient = ClientTestUtils.getGcpBigQueryClient(writer, getSourceProjectId());
    BigQuery readerBqClient = ClientTestUtils.getGcpBigQueryClient(getWorkspaceReader(), getSourceProjectId());
    // Workspace owner can create a table in this dataset
    Table table = createTable(ownerBqClient, getSourceProjectId());
    String tableName = table.getTableId().getTable();
    // Workspace reader can read the table
    // This is the reader's first use of cloud APIs after being added to the workspace, so we
    // retry this operation until cloud IAM has properly synced.
    var readTable = ClientTestUtils.getWithRetryOnException(() -> readerBqClient.getTable(table.getTableId()));
    assertEquals(table, readTable);
    logger.info("Read table {} as workspace reader", tableName);
    // Workspace reader cannot modify tables
    Table readerUpdatedTable = table.toBuilder().setDescription("A new table description").build();
    assertThrows(BigQueryException.class, () -> readerBqClient.update(readerUpdatedTable), "Workspace reader was able to modify table metadata");
    logger.info("Workspace reader could not modify table {} metadata as expected", tableName);
    // Workspace reader cannot write data to tables
    assertThrows(BigQueryException.class, () -> insertValueIntoTable(readerBqClient, "some value"), "Workspace reader was able to insert data into a table");
    logger.info("Workspace reader could not modify table {} contents as expected", tableName);
    // Workspace writer can also read the table
    // This is the writer's first use of cloud APIs after being added to the workspace, so we
    // retry this operation until cloud IAM has properly synced.
    var writerReadTable = ClientTestUtils.getWithRetryOnException(() -> writerBqClient.getTable(table.getTableId()));
    assertEquals(table, writerReadTable);
    logger.info("Read table {} as workspace writer", tableName);
    // In contrast, a workspace writer can write data to tables
    String columnValue = "this value lives in a table";
    insertValueIntoTable(writerBqClient, columnValue);
    logger.info("Workspace writer wrote a row to table {}", tableName);
    // Create a dataset to hold query results in the destination project.
    ControlledGcpResourceApi readerResourceApi = ClientTestUtils.getControlledGcpResourceClient(getWorkspaceReader(), server);
    String resultDatasetId = "temporary_result_dataset";
    GcpBigQueryDatasetResource temporaryResultDataset = BqDatasetUtils.makeControlledBigQueryDatasetUserShared(readerResourceApi, getDestinationWorkspaceId(), "temporary_result_resource", resultDatasetId, CloningInstructionsEnum.NOTHING);
    // The table does not exist yet, but will be created to hold query results.
    TableId resultTableId = TableId.of(getDestinationProjectId(), resultDatasetId, BqDatasetUtils.BQ_RESULT_TABLE_NAME);
    // Workspace reader can now read the row inserted above.
    // NOTE(review): presumably readValueFromTable runs a query whose destination table is
    // resultTableId in the destination project — confirm against the helper implementation.
    assertEquals(columnValue, readValueFromTable(readerBqClient, resultTableId));
    logger.info("Workspace reader read that row from table {}", tableName);
    // Workspace writer can update the table metadata
    String newDescription = "Another new table description";
    Table writerUpdatedTable = table.toBuilder().setDescription(newDescription).build();
    Table updatedTable = writerBqClient.update(writerUpdatedTable);
    assertEquals(newDescription, updatedTable.getDescription());
    logger.info("Workspace writer modified table {} metadata", tableName);
    // Workspace owner can update the dataset resource through WSM
    String resourceDescription = "a description for WSM";
    Integer defaultTableLifetimeSec = 5400;
    var updateDatasetRequest = new UpdateControlledGcpBigQueryDatasetRequestBody().description(resourceDescription).updateParameters(new GcpBigQueryDatasetUpdateParameters().defaultTableLifetime(defaultTableLifetimeSec));
    ownerResourceApi.updateBigQueryDataset(updateDatasetRequest, getWorkspaceId(), resourceId);
    var datasetAfterUpdate = ownerResourceApi.getBigQueryDataset(getWorkspaceId(), resourceId);
    assertEquals(datasetAfterUpdate.getMetadata().getDescription(), resourceDescription);
    logger.info("Workspace owner updated resource {}", resourceId);
    // However, invalid updates are rejected.
    String invalidName = "!!!invalid_name!!!";
    var invalidUpdateDatasetRequest = new UpdateControlledGcpBigQueryDatasetRequestBody().name(invalidName);
    ApiException invalidUpdateEx = assertThrows(ApiException.class, () -> ownerResourceApi.updateBigQueryDataset(invalidUpdateDatasetRequest, getWorkspaceId(), resourceId));
    assertEquals(HttpStatusCodes.STATUS_CODE_BAD_REQUEST, invalidUpdateEx.getCode());
    // Cloud metadata matches the updated values.
    Dataset cloudDataset = ownerBqClient.getDataset(DatasetId.of(getSourceProjectId(), DATASET_RESOURCE_NAME));
    // WSM takes the lifetime in seconds; the BigQuery API reports it in milliseconds.
    assertEquals(defaultTableLifetimeSec * 1000L, cloudDataset.getDefaultTableLifetime());
    // Partition expiration was never set, so it should still be absent on the cloud dataset.
    assertNull(cloudDataset.getDefaultPartitionExpirationMs());
    // Workspace writer can delete the table we created earlier
    logger.info("Deleting table {} from dataset {}", table.getTableId().getTable(), DATASET_RESOURCE_NAME);
    assertTrue(writerBqClient.delete(TableId.of(getSourceProjectId(), DATASET_RESOURCE_NAME, table.getTableId().getTable())));
    // Workspace reader can clean up the results table and dataset before cloning
    readerResourceApi.deleteBigQueryDataset(getDestinationWorkspaceId(), temporaryResultDataset.getMetadata().getResourceId());
    // Populate dataset with additional tables to verify cloning behavior
    BqDatasetUtils.populateBigQueryDataset(createdDataset, testUser, getSourceProjectId());
    // Verify workspace reader is able to clone the resource they can read
    testCloneBigQueryDataset(createdDataset, getWorkspaceReader(), readerResourceApi);
    // The reader should be able to enumerate the dataset.
    ResourceApi readerApi = ClientTestUtils.getResourceClient(getWorkspaceReader(), server);
    ResourceList datasetList = readerApi.enumerateResources(getWorkspaceId(), 0, 5, ResourceType.BIG_QUERY_DATASET, StewardshipType.CONTROLLED);
    // Exactly one controlled BQ dataset is expected in the source workspace at this point.
    assertEquals(1, datasetList.getResources().size());
    MultiResourcesUtils.assertResourceType(ResourceType.BIG_QUERY_DATASET, datasetList);
    // Workspace writer cannot delete the dataset directly (controlled resources are
    // managed by WSM, so direct cloud deletion is forbidden).
    var writerCannotDeleteException = assertThrows(BigQueryException.class, () -> writerBqClient.delete(DATASET_RESOURCE_NAME));
    assertEquals(HttpStatusCodes.STATUS_CODE_FORBIDDEN, writerCannotDeleteException.getCode());
    // Workspace owner cannot delete the dataset directly either.
    var ownerCannotDeleteException = assertThrows(BigQueryException.class, () -> ownerBqClient.delete(DATASET_RESOURCE_NAME));
    assertEquals(HttpStatusCodes.STATUS_CODE_FORBIDDEN, ownerCannotDeleteException.getCode());
    // Workspace owner can delete the dataset through WSM
    ownerResourceApi.deleteBigQueryDataset(getWorkspaceId(), resourceId);
}
Also used : TableId(com.google.cloud.bigquery.TableId) GrantRoleRequestBody(bio.terra.workspace.model.GrantRoleRequestBody) BigQuery(com.google.cloud.bigquery.BigQuery) Table(com.google.cloud.bigquery.Table) ClonedControlledGcpBigQueryDataset(bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset) Dataset(com.google.cloud.bigquery.Dataset) GcpBigQueryDatasetUpdateParameters(bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters) GcpBigQueryDatasetResource(bio.terra.workspace.model.GcpBigQueryDatasetResource) UpdateControlledGcpBigQueryDatasetRequestBody(bio.terra.workspace.model.UpdateControlledGcpBigQueryDatasetRequestBody) ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) ResourceApi(bio.terra.workspace.api.ResourceApi) ResourceList(bio.terra.workspace.model.ResourceList) ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) UUID(java.util.UUID) ApiException(bio.terra.workspace.client.ApiException)

Example 2 with GcpBigQueryDatasetUpdateParameters

Use of bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters in the project terra-cli by DataBiosphere.

In the class WorkspaceManagerService, the method updateControlledBigQueryDataset:

/**
 * Updates a BigQuery dataset controlled resource by calling the Workspace Manager POST
 * "/api/workspaces/v1/{workspaceId}/resources/controlled/gcp/bqdatasets/{resourceId}"
 * endpoint.
 *
 * @param workspaceId the workspace where the resource exists
 * @param resourceId the resource id
 * @param updateParams resource properties to update
 */
public void updateControlledBigQueryDataset(UUID workspaceId, UUID resourceId, UpdateControlledBqDatasetParams updateParams) {
    // Translate the CLI-side update parameters into the WSM request shape.
    GcpBigQueryDatasetUpdateParameters datasetParameters =
        new GcpBigQueryDatasetUpdateParameters()
            .defaultPartitionLifetime(updateParams.defaultPartitionLifetimeSeconds)
            .defaultTableLifetime(updateParams.defaultTableLifetimeSeconds);
    UpdateControlledGcpBigQueryDatasetRequestBody updateRequest =
        new UpdateControlledGcpBigQueryDatasetRequestBody()
            .name(updateParams.resourceFields.name)
            .description(updateParams.resourceFields.description)
            .updateParameters(datasetParameters);
    // Issue the update through a fresh API client, retrying transient failures.
    callWithRetries(
        () -> new ControlledGcpResourceApi(apiClient).updateBigQueryDataset(updateRequest, workspaceId, resourceId),
        "Error updating controlled BigQuery dataset in the workspace.");
}
Also used : UpdateControlledGcpBigQueryDatasetRequestBody(bio.terra.workspace.model.UpdateControlledGcpBigQueryDatasetRequestBody) ControlledGcpResourceApi(bio.terra.workspace.api.ControlledGcpResourceApi) GcpBigQueryDatasetUpdateParameters(bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters)

Aggregations

ControlledGcpResourceApi (bio.terra.workspace.api.ControlledGcpResourceApi)2 GcpBigQueryDatasetUpdateParameters (bio.terra.workspace.model.GcpBigQueryDatasetUpdateParameters)2 UpdateControlledGcpBigQueryDatasetRequestBody (bio.terra.workspace.model.UpdateControlledGcpBigQueryDatasetRequestBody)2 ResourceApi (bio.terra.workspace.api.ResourceApi)1 ApiException (bio.terra.workspace.client.ApiException)1 ClonedControlledGcpBigQueryDataset (bio.terra.workspace.model.ClonedControlledGcpBigQueryDataset)1 GcpBigQueryDatasetResource (bio.terra.workspace.model.GcpBigQueryDatasetResource)1 GrantRoleRequestBody (bio.terra.workspace.model.GrantRoleRequestBody)1 ResourceList (bio.terra.workspace.model.ResourceList)1 BigQuery (com.google.cloud.bigquery.BigQuery)1 Dataset (com.google.cloud.bigquery.Dataset)1 Table (com.google.cloud.bigquery.Table)1 TableId (com.google.cloud.bigquery.TableId)1 UUID (java.util.UUID)1