
Example 16 with ControlledBigQueryDatasetResource

use of bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

the class ControlledResourceServiceTest method createBqDatasetUndo.

@Test
@DisabledIfEnvironmentVariable(named = "TEST_ENV", matches = BUFFER_SERVICE_DISABLED_ENVS_REG_EX)
void createBqDatasetUndo() throws Exception {
    String datasetId = ControlledResourceFixtures.uniqueDatasetId();
    String location = "us-central1";
    ApiGcpBigQueryDatasetCreationParameters creationParameters = new ApiGcpBigQueryDatasetCreationParameters().datasetId(datasetId).location(location);
    ControlledBigQueryDatasetResource resource = ControlledResourceFixtures.makeDefaultControlledBigQueryBuilder(workspace.getWorkspaceId()).datasetName(datasetId).build();
    // Test idempotency of dataset-specific undo step by retrying once.
    Map<String, StepStatus> retrySteps = new HashMap<>();
    retrySteps.put(CreateBigQueryDatasetStep.class.getName(), StepStatus.STEP_RESULT_FAILURE_RETRY);
    jobService.setFlightDebugInfoForTest(FlightDebugInfo.newBuilder().lastStepFailure(true).undoStepFailures(retrySteps).build());
    // Service methods which wait for a flight to complete will throw an
    // InvalidResultStateException when that flight fails without a cause, which occurs when a
    // flight fails via debugInfo.
    assertThrows(InvalidResultStateException.class, () -> controlledResourceService.createControlledResourceSync(resource, null, user.getAuthenticatedRequest(), creationParameters));
    BigQueryCow bqCow = crlService.createWsmSaBigQueryCow();
    GoogleJsonResponseException getException = assertThrows(GoogleJsonResponseException.class, () -> bqCow.datasets().get(projectId, resource.getDatasetName()).execute());
    assertEquals(HttpStatus.NOT_FOUND.value(), getException.getStatusCode());
    assertThrows(ResourceNotFoundException.class, () -> controlledResourceService.getControlledResource(workspace.getWorkspaceId(), resource.getResourceId(), user.getAuthenticatedRequest()));
}
Also used : GoogleJsonResponseException(com.google.api.client.googleapis.json.GoogleJsonResponseException) HashMap(java.util.HashMap) ApiGcpBigQueryDatasetCreationParameters(bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetCreationParameters) StepStatus(bio.terra.stairway.StepStatus) ControlledBigQueryDatasetResource(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource) BigQueryCow(bio.terra.cloudres.google.bigquery.BigQueryCow) CreateBigQueryDatasetStep(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.CreateBigQueryDatasetStep) Test(org.junit.jupiter.api.Test) BaseConnectedTest(bio.terra.workspace.common.BaseConnectedTest) DisabledIfEnvironmentVariable(org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable)
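
This test forces the last flight step to fail and retries the dataset-specific undo once, so the undo must be idempotent. The undo logic of CreateBigQueryDatasetStep is not shown on this page; below is a minimal sketch of what an idempotent undo could look like, assuming the step holds crlService, projectId, and resource fields, and assuming BigQueryCow exposes a datasets().delete(projectId, datasetId) call mirroring the datasets().get(...) call used in the test. It is illustrative only, not the project's actual implementation.

@Override
public StepResult undoStep(FlightContext flightContext) throws InterruptedException {
    final BigQueryCow bqCow = crlService.createWsmSaBigQueryCow();
    try {
        // Remove the dataset created by doStep, if it exists.
        bqCow.datasets().delete(projectId, resource.getDatasetName()).execute();
    } catch (GoogleJsonResponseException e) {
        // A 404 means the dataset was never created or is already gone, so a
        // retried undo (as forced by this test) still succeeds.
        if (e.getStatusCode() != HttpStatus.NOT_FOUND.value()) {
            return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
        }
    } catch (IOException e) {
        return new StepResult(StepStatus.STEP_RESULT_FAILURE_RETRY, e);
    }
    return StepResult.getStepResultSuccess();
}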

Example 17 with ControlledBigQueryDatasetResource

use of bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

the class ControlledResourceServiceTest method updateBqDatasetWithUndefinedExpirationTimes.

@Test
@DisabledIfEnvironmentVariable(named = "TEST_ENV", matches = BUFFER_SERVICE_DISABLED_ENVS_REG_EX)
void updateBqDatasetWithUndefinedExpirationTimes() throws Exception {
    // create the dataset, with expiration times initially defined
    String datasetId = ControlledResourceFixtures.uniqueDatasetId();
    String location = "us-central1";
    Integer initialDefaultTableLifetime = 4800;
    Integer initialDefaultPartitionLifetime = 4801;
    ApiGcpBigQueryDatasetCreationParameters creationParameters =
        new ApiGcpBigQueryDatasetCreationParameters()
            .datasetId(datasetId)
            .location(location)
            .defaultTableLifetime(initialDefaultTableLifetime)
            .defaultPartitionLifetime(initialDefaultPartitionLifetime);
    ControlledBigQueryDatasetResource resource =
        ControlledResourceFixtures.makeDefaultControlledBigQueryBuilder(workspace.getWorkspaceId())
            .datasetName(datasetId)
            .build();
    ControlledBigQueryDatasetResource createdDataset =
        controlledResourceService
            .createControlledResourceSync(resource, null, user.getAuthenticatedRequest(), creationParameters)
            .castByEnum(WsmResourceType.CONTROLLED_GCP_BIG_QUERY_DATASET);
    // check the expiration times stored on the cloud are defined
    validateBigQueryDatasetCloudMetadata(projectId, createdDataset.getDatasetName(), location, initialDefaultTableLifetime, initialDefaultPartitionLifetime);
    // make an update request to set the expiration times to undefined values
    ApiGcpBigQueryDatasetUpdateParameters updateParameters = new ApiGcpBigQueryDatasetUpdateParameters().defaultTableLifetime(0).defaultPartitionLifetime(0);
    controlledResourceService.updateBqDataset(resource, updateParameters, user.getAuthenticatedRequest(), null, null);
    // check the expiration times stored on the cloud are now undefined
    validateBigQueryDatasetCloudMetadata(projectId, createdDataset.getDatasetName(), location, null, null);
    // update just one expiration time back to a defined value
    Integer newDefaultTableLifetime = 3600;
    updateParameters = new ApiGcpBigQueryDatasetUpdateParameters().defaultTableLifetime(newDefaultTableLifetime);
    controlledResourceService.updateBqDataset(resource, updateParameters, user.getAuthenticatedRequest(), null, null);
    // check there is one defined and one undefined expiration value
    validateBigQueryDatasetCloudMetadata(projectId, createdDataset.getDatasetName(), location, newDefaultTableLifetime, null);
    // update the other expiration time back to a defined value
    Integer newDefaultPartitionLifetime = 3601;
    updateParameters = new ApiGcpBigQueryDatasetUpdateParameters().defaultPartitionLifetime(newDefaultPartitionLifetime);
    controlledResourceService.updateBqDataset(resource, updateParameters, user.getAuthenticatedRequest(), null, null);
    // check the expiration times stored on the cloud are both defined again
    validateBigQueryDatasetCloudMetadata(projectId, createdDataset.getDatasetName(), location, newDefaultTableLifetime, newDefaultPartitionLifetime);
}
Also used : ApiGcpBigQueryDatasetUpdateParameters(bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetUpdateParameters) ApiGcpBigQueryDatasetCreationParameters(bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetCreationParameters) ControlledBigQueryDatasetResource(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource) Test(org.junit.jupiter.api.Test) BaseConnectedTest(bio.terra.workspace.common.BaseConnectedTest) DisabledIfEnvironmentVariable(org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable)
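
The helper validateBigQueryDatasetCloudMetadata is referenced but not shown on this page. A plausible sketch of what it checks follows, assuming the CRL datasets().get(...).execute() call returns the standard com.google.api.services.bigquery.model.Dataset, and assuming the lifetimes are given in seconds while BigQuery stores them in milliseconds; both the helper body and the unit conversion are assumptions, not taken from the project.

private void validateBigQueryDatasetCloudMetadata(
    String projectId, String datasetId, String location, Integer tableLifetimeSec, Integer partitionLifetimeSec) throws IOException {
    final Dataset dataset = crlService.createWsmSaBigQueryCow().datasets().get(projectId, datasetId).execute();
    assertEquals(location, dataset.getLocation());
    if (tableLifetimeSec == null) {
        // "Undefined" expiration shows up as a null value on the cloud dataset.
        assertNull(dataset.getDefaultTableExpirationMs());
    } else {
        assertEquals(Long.valueOf(tableLifetimeSec * 1000L), dataset.getDefaultTableExpirationMs());
    }
    if (partitionLifetimeSec == null) {
        assertNull(dataset.getDefaultPartitionExpirationMs());
    } else {
        assertEquals(Long.valueOf(partitionLifetimeSec * 1000L), dataset.getDefaultPartitionExpirationMs());
    }
}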

Example 18 with ControlledBigQueryDatasetResource

use of bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

the class ControlledGcpResourceApiController method updateBigQueryDataset.

@Override
public ResponseEntity<ApiGcpBigQueryDatasetResource> updateBigQueryDataset(UUID workspaceId, UUID resourceId, ApiUpdateControlledGcpBigQueryDatasetRequestBody body) {
    logger.info("Updating dataset resourceId {} workspaceId {}", resourceId, workspaceId);
    final AuthenticatedUserRequest userRequest = getAuthenticatedInfo();
    final ControlledBigQueryDatasetResource resource =
        controlledResourceService
            .getControlledResource(workspaceId, resourceId, userRequest)
            .castByEnum(WsmResourceType.CONTROLLED_GCP_BIG_QUERY_DATASET);
    controlledResourceService.updateBqDataset(
        resource, body.getUpdateParameters(), userRequest, body.getName(), body.getDescription());
    final ControlledBigQueryDatasetResource updatedResource =
        controlledResourceService
            .getControlledResource(workspaceId, resourceId, userRequest)
            .castByEnum(WsmResourceType.CONTROLLED_GCP_BIG_QUERY_DATASET);
    return new ResponseEntity<>(updatedResource.toApiResource(), HttpStatus.OK);
}
Also used : ResponseEntity(org.springframework.http.ResponseEntity) AuthenticatedUserRequest(bio.terra.workspace.service.iam.AuthenticatedUserRequest) ControlledBigQueryDatasetResource(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource)
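
For context, a caller-side sketch of building the request body this endpoint consumes; it assumes the generated ApiUpdateControlledGcpBigQueryDatasetRequestBody follows the same fluent-setter pattern as the other Api* models on this page, and "controller" stands for an instance of ControlledGcpResourceApiController (the setter names and values are assumptions):

ApiGcpBigQueryDatasetUpdateParameters updateParameters =
    new ApiGcpBigQueryDatasetUpdateParameters().defaultTableLifetime(3600).defaultPartitionLifetime(3601);
ApiUpdateControlledGcpBigQueryDatasetRequestBody body =
    new ApiUpdateControlledGcpBigQueryDatasetRequestBody()
        .name("renamed-dataset")
        .description("updated description")
        .updateParameters(updateParameters);
// The controller re-reads the resource after the update, so the response reflects the new values.
ResponseEntity<ApiGcpBigQueryDatasetResource> response = controller.updateBigQueryDataset(workspaceId, resourceId, body);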

Example 19 with ControlledBigQueryDatasetResource

use of bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

the class ResourceDaoTest method duplicateBigQueryDatasetRejected.

@Test
public void duplicateBigQueryDatasetRejected() {
    String datasetName1 = "dataset1";
    String projectId1 = "projectId1";
    String projectId2 = "projectId2";
    final UUID workspaceId1 = createGcpWorkspace();
    final ControlledBigQueryDatasetResource initialResource =
        ControlledBigQueryDatasetResource.builder()
            .common(ControlledResourceFixtures.makeDefaultControlledResourceFields(workspaceId1))
            .projectId(projectId1)
            .datasetName(datasetName1)
            .build();
    resourceDao.createControlledResource(initialResource);
    final UUID workspaceId2 = createGcpWorkspace();
    // This is in a different workspace (and so a different cloud context), so it is not a conflict
    // even with the same Dataset ID.
    final ControlledBigQueryDatasetResource uniqueResource =
        ControlledBigQueryDatasetResource.builder()
            .common(ControlledResourceFixtures.makeDefaultControlledResourceFields(workspaceId2))
            .datasetName(datasetName1)
            .projectId(projectId2)
            .build();
    resourceDao.createControlledResource(uniqueResource);
    // This is in the same workspace as initialResource, so it should be a conflict.
    final ControlledBigQueryDatasetResource duplicatingResource =
        ControlledBigQueryDatasetResource.builder()
            .common(ControlledResourceFixtures.makeDefaultControlledResourceFields(workspaceId1))
            .projectId(projectId1)
            .datasetName(datasetName1)
            .build();
    assertThrows(DuplicateResourceException.class, () -> resourceDao.createControlledResource(duplicatingResource));
    // clean up
    resourceDao.deleteResource(initialResource.getWorkspaceId(), initialResource.getResourceId());
    resourceDao.deleteResource(uniqueResource.getWorkspaceId(), uniqueResource.getResourceId());
    resourceDao.deleteResource(duplicatingResource.getWorkspaceId(), duplicatingResource.getResourceId());
}
Also used : UUID(java.util.UUID) ControlledBigQueryDatasetResource(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource) Test(org.junit.jupiter.api.Test) BaseUnitTest(bio.terra.workspace.common.BaseUnitTest)
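
As the test shows, the uniqueness constraint is scoped to a single workspace and cloud context: the same dataset name can exist in two workspaces, but a second controlled resource with the same project and dataset name in the same workspace is rejected. A hedged sketch of how calling code might surface that conflict (the logger call and message are illustrative, not from the project):

try {
    resourceDao.createControlledResource(duplicatingResource);
} catch (DuplicateResourceException e) {
    // Same workspace, project, and dataset name as an existing controlled resource.
    logger.warn("BigQuery dataset {} already exists in workspace {}", datasetName1, workspaceId1);
    throw e;
}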

Example 20 with ControlledBigQueryDatasetResource

use of bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource in project terra-workspace-manager by DataBiosphere.

the class CopyBigQueryDatasetDefinitionStep method doStep.

@Override
public StepResult doStep(FlightContext flightContext) throws InterruptedException, RetryException {
    final FlightMap inputParameters = flightContext.getInputParameters();
    final FlightMap workingMap = flightContext.getWorkingMap();
    final CloningInstructions effectiveCloningInstructions = inputParameters.get(ControlledResourceKeys.CLONING_INSTRUCTIONS, CloningInstructions.class);
    // TODO: handle cloning a controlled resource with REFERENCE option, PF-812
    if (CloningInstructions.COPY_NOTHING.equals(effectiveCloningInstructions) || CloningInstructions.COPY_REFERENCE.equals(effectiveCloningInstructions)) {
        // nothing further to do here or on following steps
        // Build an empty response object
        final ApiClonedControlledGcpBigQueryDataset result =
            new ApiClonedControlledGcpBigQueryDataset()
                .dataset(null)
                .sourceWorkspaceId(sourceDataset.getWorkspaceId())
                .sourceResourceId(sourceDataset.getResourceId())
                .effectiveCloningInstructions(effectiveCloningInstructions.toApiModel());
        FlightUtils.setResponse(flightContext, result, HttpStatus.OK);
        return StepResult.getStepResultSuccess();
    }
    final String resourceName = FlightUtils.getInputParameterOrWorkingValue(flightContext, ResourceKeys.RESOURCE_NAME, ResourceKeys.PREVIOUS_RESOURCE_NAME, String.class);
    final String description = FlightUtils.getInputParameterOrWorkingValue(flightContext, ResourceKeys.RESOURCE_DESCRIPTION, ResourceKeys.PREVIOUS_RESOURCE_DESCRIPTION, String.class);
    final String datasetName = Optional.ofNullable(inputParameters.get(ControlledResourceKeys.DESTINATION_DATASET_NAME, String.class)).orElse(sourceDataset.getDatasetName());
    workingMap.put(ControlledResourceKeys.DESTINATION_DATASET_NAME, datasetName);
    final UUID destinationWorkspaceId = inputParameters.get(ControlledResourceKeys.DESTINATION_WORKSPACE_ID, UUID.class);
    final String location = FlightUtils.getInputParameterOrWorkingValue(flightContext, ControlledResourceKeys.LOCATION, ControlledResourceKeys.LOCATION, String.class);
    final String destinationProjectId = gcpCloudContextService.getRequiredGcpProject(destinationWorkspaceId);
    final ControlledResourceFields commonFields =
        ControlledResourceFields.builder()
            .accessScope(sourceDataset.getAccessScope())
            .assignedUser(sourceDataset.getAssignedUser().orElse(null))
            .cloningInstructions(sourceDataset.getCloningInstructions())
            .description(description)
            .managedBy(sourceDataset.getManagedBy())
            .name(resourceName)
            .resourceId(UUID.randomUUID())
            .workspaceId(destinationWorkspaceId)
            .build();
    final ControlledBigQueryDatasetResource destinationResource =
        ControlledBigQueryDatasetResource.builder()
            .projectId(destinationProjectId)
            .datasetName(datasetName)
            .common(commonFields)
            .build();
    final ApiGcpBigQueryDatasetCreationParameters creationParameters = new ApiGcpBigQueryDatasetCreationParameters().datasetId(datasetName).location(location);
    final ControlledResourceIamRole iamRole = IamRoleUtils.getIamRoleForAccessScope(destinationResource.getAccessScope());
    final ControlledBigQueryDatasetResource clonedResource =
        controlledResourceService
            .createControlledResourceSync(destinationResource, iamRole, userRequest, creationParameters)
            .castByEnum(WsmResourceType.CONTROLLED_GCP_BIG_QUERY_DATASET);
    workingMap.put(ControlledResourceKeys.CLONED_RESOURCE_DEFINITION, clonedResource);
    final ApiClonedControlledGcpBigQueryDataset apiResult =
        new ApiClonedControlledGcpBigQueryDataset()
            .dataset(clonedResource.toApiResource())
            .effectiveCloningInstructions(effectiveCloningInstructions.toApiModel())
            .sourceWorkspaceId(sourceDataset.getWorkspaceId())
            .sourceResourceId(sourceDataset.getResourceId());
    workingMap.put(ControlledResourceKeys.CLONE_DEFINITION_RESULT, apiResult);
    if (CloningInstructions.COPY_DEFINITION.equals(effectiveCloningInstructions) || CloningInstructions.COPY_RESOURCE.equals(effectiveCloningInstructions)) {
        // Later steps, if any, don't change the success response, since they only affect
        // internal tables and rows in the dataset.
        FlightUtils.setResponse(flightContext, apiResult, HttpStatus.OK);
    }
    return StepResult.getStepResultSuccess();
}
Also used : CloningInstructions(bio.terra.workspace.service.resource.model.CloningInstructions) FlightMap(bio.terra.stairway.FlightMap) ControlledResourceIamRole(bio.terra.workspace.service.iam.model.ControlledResourceIamRole) ApiGcpBigQueryDatasetCreationParameters(bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetCreationParameters) UUID(java.util.UUID) ControlledResourceFields(bio.terra.workspace.service.resource.controlled.model.ControlledResourceFields) ControlledBigQueryDatasetResource(bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource) ApiClonedControlledGcpBigQueryDataset(bio.terra.workspace.generated.model.ApiClonedControlledGcpBigQueryDataset)
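
The doStep above records the cloned resource in the working map under CLONED_RESOURCE_DEFINITION, which also gives the undo path what it needs. A minimal sketch of a matching undoStep follows, assuming a synchronous delete method on the controlled resource service (the name deleteControlledResourceSync is hypothetical); the working-map read uses the same FlightMap.get(key, Class) pattern as the doStep:

@Override
public StepResult undoStep(FlightContext flightContext) throws InterruptedException {
    final ControlledBigQueryDatasetResource clonedResource =
        flightContext.getWorkingMap().get(ControlledResourceKeys.CLONED_RESOURCE_DEFINITION, ControlledBigQueryDatasetResource.class);
    if (clonedResource == null) {
        // doStep exited early (COPY_NOTHING or COPY_REFERENCE) and created nothing, so there is nothing to undo.
        return StepResult.getStepResultSuccess();
    }
    // Hypothetical synchronous delete of the destination dataset created by doStep.
    controlledResourceService.deleteControlledResourceSync(
        clonedResource.getWorkspaceId(), clonedResource.getResourceId(), userRequest);
    return StepResult.getStepResultSuccess();
}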

Aggregations

ControlledBigQueryDatasetResource (bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.ControlledBigQueryDatasetResource) 20
Test (org.junit.jupiter.api.Test) 15
BaseConnectedTest (bio.terra.workspace.common.BaseConnectedTest) 12
DisabledIfEnvironmentVariable (org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable) 12
ApiGcpBigQueryDatasetCreationParameters (bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetCreationParameters) 11
StepStatus (bio.terra.stairway.StepStatus) 7
HashMap (java.util.HashMap) 7
ApiGcpBigQueryDatasetUpdateParameters (bio.terra.workspace.generated.model.ApiGcpBigQueryDatasetUpdateParameters) 5
AuthenticatedUserRequest (bio.terra.workspace.service.iam.AuthenticatedUserRequest) 5
UUID (java.util.UUID) 5
BigQueryCow (bio.terra.cloudres.google.bigquery.BigQueryCow) 4
FlightMap (bio.terra.stairway.FlightMap) 4
FlightDebugInfo (bio.terra.stairway.FlightDebugInfo) 3
FlightState (bio.terra.stairway.FlightState) 3
BaseUnitTest (bio.terra.workspace.common.BaseUnitTest) 3
ControlledResourceFields (bio.terra.workspace.service.resource.controlled.model.ControlledResourceFields) 3
Workspace (bio.terra.workspace.service.workspace.model.Workspace) 3
GoogleJsonResponseException (com.google.api.client.googleapis.json.GoogleJsonResponseException) 3
ResponseEntity (org.springframework.http.ResponseEntity) 3
CreateBigQueryDatasetStep (bio.terra.workspace.service.resource.controlled.cloud.gcp.bqdataset.CreateBigQueryDatasetStep) 2