Use of bio.terra.workspace.model.GcpBigQueryDataTableAttributes in the terra-workspace-manager project by DataBiosphere.

From the CloneWorkspace test class, method doSetup:
@Override
protected void doSetup(List<TestUserSpecification> testUsers, WorkspaceApi sourceOwnerWorkspaceApi)
    throws Exception {
  super.doSetup(testUsers, sourceOwnerWorkspaceApi);
  // set up 2 users
  assertThat(testUsers, hasSize(2));
  // user creating the source resources
  final TestUserSpecification sourceOwnerUser = testUsers.get(0);
  // user cloning the workspace
  cloningUser = testUsers.get(1);

  // Build source GCP project in main test workspace
  sourceProjectId = CloudContextMaker.createGcpCloudContext(getWorkspaceId(), sourceOwnerWorkspaceApi);
  logger.info("Created source project {} in workspace {}", sourceProjectId, getWorkspaceId());

  // add cloning user as reader on the workspace
  sourceOwnerWorkspaceApi.grantRole(
      new GrantRoleRequestBody().memberEmail(cloningUser.userEmail), getWorkspaceId(), IamRole.READER);

  // give users resource APIs
  final ControlledGcpResourceApi sourceOwnerResourceApi =
      ClientTestUtils.getControlledGcpResourceClient(sourceOwnerUser, server);
  cloningUserResourceApi = ClientTestUtils.getControlledGcpResourceClient(cloningUser, server);

  // Create a GCS bucket with data
  // create source bucket with COPY_RESOURCE - should clone fine
  nameSuffix = UUID.randomUUID().toString();
  sharedBucketSourceResourceName = BUCKET_RESOURCE_PREFIX + nameSuffix;
  sharedSourceBucket = makeControlledGcsBucketUserShared(
      sourceOwnerResourceApi, getWorkspaceId(), sharedBucketSourceResourceName,
      CloningInstructionsEnum.RESOURCE);
  GcsBucketUtils.addFileToBucket(sharedSourceBucket, sourceOwnerUser, sourceProjectId);

  // create a private GCS bucket, which the non-creating user can't clone
  privateSourceBucket = makeControlledGcsBucketUserPrivate(
      sourceOwnerResourceApi, getWorkspaceId(), UUID.randomUUID().toString(),
      CloningInstructionsEnum.RESOURCE);
  GcsBucketUtils.addFileToBucket(privateSourceBucket, sourceOwnerUser, sourceProjectId);

  // create a GCS bucket with data and COPY_NOTHING instruction
  sharedCopyNothingSourceBucket = makeControlledGcsBucketUserShared(
      sourceOwnerResourceApi, getWorkspaceId(), UUID.randomUUID().toString(),
      CloningInstructionsEnum.NOTHING);
  GcsBucketUtils.addFileToBucket(sharedCopyNothingSourceBucket, sourceOwnerUser, sourceProjectId);

  // create a GCS bucket with data and COPY_DEFINITION
  copyDefinitionSourceBucket = makeControlledGcsBucketUserShared(
      sourceOwnerResourceApi, getWorkspaceId(), UUID.randomUUID().toString(),
      CloningInstructionsEnum.DEFINITION);
  GcsBucketUtils.addFileToBucket(copyDefinitionSourceBucket, sourceOwnerUser, sourceProjectId);

  // Create a BigQuery dataset with tables and COPY_DEFINITION
  copyDefinitionDatasetResourceName = "copy_definition_" + nameSuffix.replace('-', '_');
  copyDefinitionDataset = makeControlledBigQueryDatasetUserShared(
      sourceOwnerResourceApi, getWorkspaceId(), copyDefinitionDatasetResourceName, null,
      CloningInstructionsEnum.DEFINITION);
  BqDatasetUtils.populateBigQueryDataset(copyDefinitionDataset, sourceOwnerUser, sourceProjectId);

  // Create a BigQuery dataset with tables and COPY_RESOURCE
  copyResourceDatasetResourceName = "copy_resource_dataset";
  copyResourceDataset = makeControlledBigQueryDatasetUserShared(
      sourceOwnerResourceApi, getWorkspaceId(), copyResourceDatasetResourceName, null,
      CloningInstructionsEnum.RESOURCE);
  BqDatasetUtils.populateBigQueryDataset(copyResourceDataset, sourceOwnerUser, sourceProjectId);

  // Create a private BigQuery dataset
  privateDatasetResourceName = "private_dataset";
  privateDataset = makeControlledBigQueryDatasetUserPrivate(
      sourceOwnerResourceApi, getWorkspaceId(), privateDatasetResourceName, null,
      CloningInstructionsEnum.RESOURCE);

  // Create reference to the shared GCS bucket in this workspace with COPY_REFERENCE
  ReferencedGcpResourceApi referencedGcpResourceApi =
      ClientTestUtils.getReferencedGcpResourceClient(sourceOwnerUser, server);
  final String bucketReferenceName = RandomStringUtils.random(16, true, false);
  sourceBucketReference = GcsBucketUtils.makeGcsBucketReference(
      sharedSourceBucket.getGcpBucket().getAttributes(), referencedGcpResourceApi, getWorkspaceId(),
      bucketReferenceName, CloningInstructionsEnum.REFERENCE);
  GcpGcsObjectAttributes referencedFileAttributes = new GcpGcsObjectAttributes()
      .bucketName(sharedSourceBucket.getGcpBucket().getAttributes().getBucketName())
      .fileName(GCS_BLOB_NAME);
  sourceBucketFileReference = makeGcsObjectReference(
      referencedFileAttributes, referencedGcpResourceApi, getWorkspaceId(),
      "a_reference_to_wsmtestblob", CloningInstructionsEnum.REFERENCE);

  // create reference to the shared BigQuery dataset with COPY_DEFINITION
  sourceDatasetReference = BqDatasetUtils.makeBigQueryDatasetReference(
      copyDefinitionDataset.getAttributes(), referencedGcpResourceApi, getWorkspaceId(),
      "dataset_resource_1");
  GcpBigQueryDataTableAttributes bqTableReferenceAttributes = new GcpBigQueryDataTableAttributes()
      .projectId(copyDefinitionDataset.getAttributes().getProjectId())
      .datasetId(copyDefinitionDataset.getAttributes().getDatasetId())
      .dataTableId(BqDatasetUtils.BQ_EMPLOYEE_TABLE_NAME);
  sourceDataTableReference = BqDatasetUtils.makeBigQueryDataTableReference(
      bqTableReferenceAttributes, referencedGcpResourceApi, getWorkspaceId(), "datatable_resource_1");
}
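
The headline class appears near the end of the method: GcpBigQueryDataTableAttributes is a generated model bean whose fluent setters (projectId, datasetId, dataTableId) identify a single BigQuery table, and the test passes it to BqDatasetUtils.makeBigQueryDataTableReference to create the referenced resource. A minimal sketch of building the attributes in isolation; the IDs below are hypothetical placeholders, not values from the test:

  // Minimal sketch: identify one BigQuery table for a data table reference.
  // The project, dataset, and table IDs are hypothetical placeholders.
  GcpBigQueryDataTableAttributes attributes =
      new GcpBigQueryDataTableAttributes()
          .projectId("my-gcp-project")
          .datasetId("my_dataset")
          .dataTableId("employee");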