Use of bio.terra.model.DatasetModel in project jade-data-repo by DataBiosphere.
The class SnapshotTest, method snapshotByQueryHappyPathTest:
@Test
public void snapshotByQueryHappyPathTest() throws Exception {
    DatasetModel dataset = dataRepoFixtures.getDataset(steward(), datasetId);
    String datasetName = dataset.getName();
    SnapshotRequestModel requestModel =
        jsonLoader.loadObject("ingest-test-snapshot-query.json", SnapshotRequestModel.class);
    // swap in the correct dataset name (with the id at the end)
    requestModel.getContents().get(0).setDatasetName(datasetName);
    requestModel.getContents().get(0).getQuerySpec().setQuery(
        "SELECT " + datasetName + ".sample.datarepo_row_id FROM " + datasetName
            + ".sample WHERE " + datasetName + ".sample.id ='sample6'");
    SnapshotSummaryModel snapshotSummary =
        dataRepoFixtures.createSnapshotWithRequest(steward(), datasetName, requestModel);
    TimeUnit.SECONDS.sleep(10);
    createdSnapshotIds.add(snapshotSummary.getId());
    SnapshotModel snapshot = dataRepoFixtures.getSnapshot(steward(), snapshotSummary.getId());
    assertEquals("new snapshot has been created", snapshot.getName(), requestModel.getName());
}
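The query above builds the fully qualified datasetName.sample reference three times via string concatenation. As a readability aid, a small helper along these lines could produce the same query; this is a hypothetical sketch, not part of SnapshotTest, and it reuses only the table and column names shown in the test above.

// Hypothetical sketch: builds the same row-id lookup query used by snapshotByQueryHappyPathTest.
// The dataset name must already include the id suffix, as noted in the test above.
private static String sampleRowIdQuery(String datasetName, String sampleId) {
    return String.format(
        "SELECT %s.sample.datarepo_row_id FROM %s.sample WHERE %s.sample.id = '%s'",
        datasetName, datasetName, datasetName, sampleId);
}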
Use of bio.terra.model.DatasetModel in project jade-data-repo by DataBiosphere.
The class DatasetIntegrationTest, method wildcardSoftDelete:
@Test
public void wildcardSoftDelete() throws Exception {
    datasetId = ingestedDataset();
    String pathPrefix = "softDelWildcard" + UUID.randomUUID().toString();
    // get 5 row ids; we'll write them out to 5 separate files
    DatasetModel dataset = dataRepoFixtures.getDataset(steward(), datasetId);
    BigQuery bigQuery = BigQueryFixtures.getBigQuery(dataset.getDataProject(), stewardToken);
    List<String> sampleRowIds = getRowIds(bigQuery, dataset, "sample", 5L);
    for (String rowId : sampleRowIds) {
        writeListToScratch(pathPrefix, Collections.singletonList(rowId));
    }
    // make a wildcard path 'gs://ingestbucket/softDelWildcard/*'
    String wildcardPath =
        String.format("gs://%s/scratch/%s/*", testConfiguration.getIngestbucket(), pathPrefix);
    // build a request and send it off
    DataDeletionRequest request = dataDeletionRequest()
        .tables(Collections.singletonList(deletionTableFile("sample", wildcardPath)));
    dataRepoFixtures.deleteData(steward(), datasetId, request);
    // there should be (7 - 5) = 2 rows "visible" in the sample table
    assertTableCount(bigQuery, dataset, "sample", 2L);
}
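The writeListToScratch and getRowIds helpers are defined elsewhere in DatasetIntegrationTest and their bodies are not shown here. For orientation, the following is a minimal sketch of what a scratch-file writer could look like using the google-cloud-storage client; the bucket, path layout, and one-row-id-per-line file format are assumptions inferred from the wildcard path built above.

// Hypothetical sketch only; the real helper lives in DatasetIntegrationTest.
// Assumes the "scratch/<pathPrefix>/<random>" layout that the wildcard path above points at.
private String writeListToScratch(String pathPrefix, List<String> rowIds) {
    Storage storage = StorageOptions.getDefaultInstance().getService();
    String blobName = String.format("scratch/%s/%s", pathPrefix, UUID.randomUUID());
    BlobInfo blobInfo = BlobInfo.newBuilder(testConfiguration.getIngestbucket(), blobName).build();
    // one row id per line (assumed file format for the soft delete control files)
    storage.create(blobInfo, String.join("\n", rowIds).getBytes(StandardCharsets.UTF_8));
    return String.format("gs://%s/%s", testConfiguration.getIngestbucket(), blobName);
}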
Use of bio.terra.model.DatasetModel in project jade-data-repo by DataBiosphere.
The class DatasetIntegrationTest, method datasetUnauthorizedPermissionsTest:
@Test
public void datasetUnauthorizedPermissionsTest() throws Exception {
    dataRepoFixtures.createDatasetError(custodian(), "dataset-minimal.json", HttpStatus.UNAUTHORIZED);
    dataRepoFixtures.createDatasetError(reader(), "dataset-minimal.json", HttpStatus.UNAUTHORIZED);
    EnumerateDatasetModel enumDatasetsResp = dataRepoFixtures.enumerateDatasets(reader());
    List<DatasetSummaryModel> items = enumDatasetsResp.getItems();
    if (items != null) {
        for (DatasetSummaryModel datasetModel : items) {
            logger.info(String.format("found dataset for reader: %s, created: %s",
                datasetModel.getId(), datasetModel.getCreatedDate()));
        }
    }
    assertThat("Reader does not have access to datasets", enumDatasetsResp.getTotal(), equalTo(0));
    DatasetSummaryModel summaryModel = dataRepoFixtures.createDataset(steward(), "dataset-minimal.json");
    datasetId = summaryModel.getId();
    DataRepoResponse<DatasetModel> getDatasetResp =
        dataRepoFixtures.getDatasetRaw(reader(), summaryModel.getId());
    assertThat("Reader is not authorized to get dataset",
        getDatasetResp.getStatusCode(), equalTo(HttpStatus.UNAUTHORIZED));
    // make sure the reader cannot delete the dataset
    DataRepoResponse<JobModel> deleteResp1 =
        dataRepoFixtures.deleteDatasetLaunch(reader(), summaryModel.getId());
    assertThat("Reader is not authorized to delete datasets",
        deleteResp1.getStatusCode(), equalTo(HttpStatus.UNAUTHORIZED));
    // Right now the authorization for dataset delete is done directly in the controller,
    // so we need to check the response to the delete request for the unauthorized failure.
    // Once we move the authorization for dataset delete into a separate step, the check
    // will need two parts, as below: check that the job launched successfully, then check
    // that the job result is a failure with unauthorized.
    // DataRepoResponse<JobModel> jobResp1 = dataRepoFixtures.deleteDatasetLaunch(
    //     reader(), summaryModel.getId());
    // assertTrue("dataset delete launch succeeded", jobResp1.getStatusCode().is2xxSuccessful());
    // assertTrue("dataset delete launch response is present", jobResp1.getResponseObject().isPresent());
    // DataRepoResponse<ErrorModel> deleteResp1 = dataRepoClient.waitForResponse(
    //     reader(), jobResp1, ErrorModel.class);
    // assertThat("Reader is not authorized to delete datasets",
    //     deleteResp1.getStatusCode(),
    //     equalTo(HttpStatus.UNAUTHORIZED));
    // make sure the custodian cannot delete the dataset
    DataRepoResponse<JobModel> deleteResp2 =
        dataRepoFixtures.deleteDatasetLaunch(custodian(), summaryModel.getId());
    assertThat("Custodian is not authorized to delete datasets",
        deleteResp2.getStatusCode(), equalTo(HttpStatus.UNAUTHORIZED));
    // same comment as above for the reader() delete
    // DataRepoResponse<JobModel> jobResp2 = dataRepoFixtures.deleteDatasetLaunch(
    //     custodian(), summaryModel.getId());
    // assertTrue("dataset delete launch succeeded", jobResp2.getStatusCode().is2xxSuccessful());
    // assertTrue("dataset delete launch response is present", jobResp2.getResponseObject().isPresent());
    // DataRepoResponse<ErrorModel> deleteResp2 = dataRepoClient.waitForResponse(
    //     custodian(), jobResp2, ErrorModel.class);
    // assertThat("Custodian is not authorized to delete datasets",
    //     deleteResp2.getStatusCode(),
    //     equalTo(HttpStatus.UNAUTHORIZED));
}
Use of bio.terra.model.DatasetModel in project jade-data-repo by DataBiosphere.
The class DatasetIntegrationTest, method testSoftDeleteHappyPath:
@Test
public void testSoftDeleteHappyPath() throws Exception {
    datasetId = ingestedDataset();
    // get row ids
    DatasetModel dataset = dataRepoFixtures.getDataset(steward(), datasetId);
    BigQuery bigQuery = BigQueryFixtures.getBigQuery(dataset.getDataProject(), stewardToken);
    List<String> participantRowIds = getRowIds(bigQuery, dataset, "participant", 3L);
    List<String> sampleRowIds = getRowIds(bigQuery, dataset, "sample", 2L);
    // write them to GCS
    String participantPath = writeListToScratch("softDel", participantRowIds);
    String samplePath = writeListToScratch("softDel", sampleRowIds);
    // build the deletion request with pointers to the two files with row ids to soft delete
    List<DataDeletionTableModel> dataDeletionTableModels = Arrays.asList(
        deletionTableFile("participant", participantPath),
        deletionTableFile("sample", samplePath));
    DataDeletionRequest request = dataDeletionRequest().tables(dataDeletionTableModels);
    // send off the soft delete request
    dataRepoFixtures.deleteData(steward(), datasetId, request);
    // make sure the new counts make sense
    assertTableCount(bigQuery, dataset, "participant", 2L);
    assertTableCount(bigQuery, dataset, "sample", 5L);
}
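getRowIds and assertTableCount are shared helpers in DatasetIntegrationTest whose implementations are not shown here. A rough sketch of how a row-id fetch could be written against the BigQuery client follows; the datarepo_<datasetName> BigQuery dataset naming and the use of the bare table name are assumptions, not confirmed by the code above.

// Hypothetical sketch only; the real helper lives in DatasetIntegrationTest.
// Assumes the dataset is surfaced in BigQuery as "datarepo_<datasetName>" (naming is an assumption).
private List<String> getRowIds(BigQuery bigQuery, DatasetModel dataset, String tableName, Long limit)
        throws InterruptedException {
    String sql = String.format(
        "SELECT datarepo_row_id FROM `%s.datarepo_%s.%s` LIMIT %d",
        dataset.getDataProject(), dataset.getName(), tableName, limit);
    TableResult result = bigQuery.query(QueryJobConfiguration.newBuilder(sql).build());
    List<String> rowIds = new ArrayList<>();
    for (FieldValueList row : result.iterateAll()) {
        rowIds.add(row.get("datarepo_row_id").getStringValue());
    }
    return rowIds;
}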
Use of bio.terra.model.DatasetModel in project jade-data-repo by DataBiosphere.
The class DatasetIntegrationTest, method testAssetCreationUndo:
@Test
public void testAssetCreationUndo() throws Exception {
    // create a dataset
    DatasetSummaryModel summaryModel = dataRepoFixtures.createDataset(steward(), "it-dataset-omop.json");
    datasetId = summaryModel.getId();
    DatasetModel datasetModel = dataRepoFixtures.getDataset(steward(), summaryModel.getId());
    List<AssetModel> originalAssetList = datasetModel.getSchema().getAssets();
    assertThat("Asset specification is as originally expected", originalAssetList.size(), equalTo(1));
    AssetModel assetModel = new AssetModel()
        .name("assetName")
        .rootTable("person")
        .rootColumn("person_id")
        .tables(Arrays.asList(
            DatasetFixtures.buildAssetParticipantTable(),
            DatasetFixtures.buildAssetSampleTable()))
        .follow(Collections.singletonList("fpk_visit_person"));
    // have the asset creation fail by enabling the fault insertion point
    dataRepoFixtures.setFault(steward(), ConfigEnum.CREATE_ASSET_FAULT.name(), true);
    // add an asset spec
    dataRepoFixtures.addDatasetAsset(steward(), datasetModel.getId(), assetModel);
    // make sure the undo completed successfully
    DatasetModel datasetModelWAsset = dataRepoFixtures.getDataset(steward(), datasetModel.getId());
    DatasetSpecificationModel datasetSpecificationModel = datasetModelWAsset.getSchema();
    List<AssetModel> assetList = datasetSpecificationModel.getAssets();
    // assert that the additional asset was never added
    assertThat("Additional asset specification has never been added", assetList.size(), equalTo(1));
}
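One thing to note: the CREATE_ASSET_FAULT flag is still enabled when this test ends. If later tests in the class exercise asset creation, the fault would presumably need to be cleared with the same fixture call, for example:

// Presumed cleanup; mirrors the setFault call above with the flag flipped back to false.
dataRepoFixtures.setFault(steward(), ConfigEnum.CREATE_ASSET_FAULT.name(), false);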