Usage of org.sagebionetworks.bridge.models.upload.Upload in the BridgeServer2 project by Sage-Bionetworks: class Exporter3ServiceTest, method completeUpload_NoSharing.
@Test
public void completeUpload_NoSharing() {
    // Build the upload under test.
    Upload input = Upload.create();
    input.setUploadId(RECORD_ID);
    input.setHealthCode(TestConstants.HEALTH_CODE);

    // Account whose sharing scope is disabled.
    Account noSharingAccount = Account.create();
    noSharingAccount.setSharingScope(SharingScope.NO_SHARING);
    when(mockAccountService.getAccount(any())).thenReturn(Optional.of(noSharingAccount));

    // Record returned by the EX3 health data service.
    HealthDataRecordEx3 savedRecord = HealthDataRecordEx3.create();
    savedRecord.setId(RECORD_ID);
    when(mockHealthDataEx3Service.createOrUpdateRecord(any())).thenReturn(savedRecord);

    // Execute.
    exporter3Service.completeUpload(app, input);

    // With sharing disabled, nothing should be sent to SQS.
    verifyZeroInteractions(mockSqsClient);
}
Usage of org.sagebionetworks.bridge.models.upload.Upload in the BridgeServer2 project by Sage-Bionetworks: class UploadService, method createUpload.
/**
 * <p>
 * Creates an upload and returns a session containing a pre-signed S3 URL the client can PUT the upload to.
 * If the request is a duplicate of a previous upload that was never completed (still REQUESTED), the previous
 * upload is re-activated instead of creating a new one.
 * </p>
 *
 * @param appId
 *         app the upload lives in
 * @param participant
 *         participant making the upload; the health code is used for dedupe bookkeeping
 * @param uploadRequest
 *         upload request, validated by UploadValidator
 * @return upload session with the upload ID, pre-signed URL, and URL expiration time
 */
public UploadSession createUpload(String appId, StudyParticipant participant, UploadRequest uploadRequest) {
    Validate.entityThrowingException(UploadValidator.INSTANCE, uploadRequest);

    // Check to see if upload is a dupe, and if it is, get the upload status.
    String uploadMd5 = uploadRequest.getContentMd5();
    DateTime uploadRequestedOn = DateUtils.getCurrentDateTime();
    String originalUploadId = null;
    UploadStatus originalUploadStatus = null;
    // Dedupe logic is skipped for the API (test) app, so that integration tests can submit the
    // same uploads over and over again with each test run.
    if (!API_APP_ID.equals(appId)) {
        try {
            originalUploadId = uploadDedupeDao.getDuplicate(participant.getHealthCode(), uploadMd5, uploadRequestedOn);
            if (originalUploadId != null) {
                Upload originalUpload = uploadDao.getUpload(originalUploadId);
                originalUploadStatus = originalUpload.getStatus();
            }
        } catch (RuntimeException ex) {
            // Don't want dedupe logic to fail the upload. Log an error and swallow the exception.
            logger.error("Error deduping upload: " + ex.getMessage(), ex);
        }
    }

    String uploadId;
    if (originalUploadId != null && originalUploadStatus == UploadStatus.REQUESTED) {
        // This is a dupe of a previous upload, and that previous upload is incomplete (REQUESTED). Instead of
        // creating a new upload in the upload table, reactivate the old one.
        uploadId = originalUploadId;
    } else {
        // This is a new upload.
        Upload upload = uploadDao.createUpload(uploadRequest, appId, participant.getHealthCode(), originalUploadId);
        uploadId = upload.getUploadId();

        if (originalUploadId != null) {
            // We had a dupe of a previous completed upload. Log this for future analysis.
            logger.info("Detected dupe: App " + appId + ", upload " + uploadId + " is a dupe of " + originalUploadId);
        } else {
            try {
                // Not a dupe. Register this dupe so we can detect dupes of this.
                uploadDedupeDao.registerUpload(participant.getHealthCode(), uploadMd5, uploadRequestedOn, uploadId);
            } catch (RuntimeException ex) {
                // Don't want dedupe logic to fail the upload. Log an error and swallow the exception.
                logger.error("Error registering upload " + uploadId + " in dedupe table: " + ex.getMessage(), ex);
            }
        }
    }

    // Upload ID in DynamoDB is the same as the S3 Object ID
    GeneratePresignedUrlRequest presignedUrlRequest = new GeneratePresignedUrlRequest(uploadBucket, uploadId, HttpMethod.PUT);

    // Compute the expiration up front rather than mutating a Date in place: the same Date instance is
    // shared with the pre-signed request and the returned session, so it must never be mutated.
    final Date expiration = DateTime.now(DateTimeZone.UTC).plus(EXPIRATION).toDate();
    presignedUrlRequest.setExpiration(expiration);

    // Temporary session credentials.
    // NOTE(review): the field name "uploadCredentailsService" contains a typo; renaming the field is
    // outside the scope of this method.
    AWSStaticCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(uploadCredentailsService.getSessionCredentials());
    presignedUrlRequest.setRequestCredentialsProvider(credentialsProvider);

    // Ask for server-side encryption.
    presignedUrlRequest.addRequestParameter(SERVER_SIDE_ENCRYPTION, AES_256_SERVER_SIDE_ENCRYPTION);

    // Additional headers that are part of the signature.
    presignedUrlRequest.setContentMd5(uploadMd5);
    presignedUrlRequest.setContentType(uploadRequest.getContentType());

    URL url = s3UploadClient.generatePresignedUrl(presignedUrlRequest);
    return new UploadSession(uploadId, url, expiration.getTime());
}
Usage of org.sagebionetworks.bridge.models.upload.Upload in the BridgeServer2 project by Sage-Bionetworks: class UploadService, method getUploadValidationStatus.
/**
 * <p>
 * Returns the validation status and messages for the given upload ID. If a health data record was created
 * for the upload, it is included in the returned status.
 * </p>
 * <p>
 * The user comes from the controller and is guaranteed to be present. The uploadId, however, is user input
 * and must be validated.
 * </p>
 *
 * @param uploadId
 *         ID of upload to fetch, must be non-null and non-empty
 * @return upload validation status, which includes the health data record if one was created
 */
public UploadValidationStatus getUploadValidationStatus(@Nonnull String uploadId) {
    Upload upload = getUpload(uploadId);

    // Attach the health data record, if the upload produced one. A failure here is non-fatal:
    // the validation status is still returned, just without the record.
    HealthDataRecord healthDataRecord = null;
    String linkedRecordId = upload.getRecordId();
    if (!Strings.isNullOrEmpty(linkedRecordId)) {
        try {
            healthDataRecord = healthDataService.getRecordById(linkedRecordId);
        } catch (RuntimeException ex) {
            logger.warn("Error getting record ID " + linkedRecordId + " for upload ID " + uploadId + ": " + ex.getMessage(), ex);
        }
    }

    return UploadValidationStatus.from(upload, healthDataRecord);
}
Usage of org.sagebionetworks.bridge.models.upload.Upload in the BridgeServer2 project by Sage-Bionetworks: class UploadValidationContextTest, method shallowCopy.
@Test
public void shallowCopy() {
    // Fixture objects the context will hold.
    App testApp = TestUtils.getValidApp(UploadValidationContextTest.class);
    Upload testUpload = new DynamoUpload2();
    File mockTempDir = mock(File.class);
    File mockDataFile = mock(File.class);
    File mockDecryptedFile = mock(File.class);
    Map<String, File> unzippedFiles = ImmutableMap.<String, File>builder()
            .put("foo", mock(File.class))
            .put("bar", mock(File.class))
            .put("baz", mock(File.class))
            .build();
    JsonNode infoNode = BridgeObjectMapper.get().createObjectNode();
    HealthDataRecord healthRecord = HealthDataRecord.create();

    // Populate the source context.
    UploadValidationContext source = new UploadValidationContext();
    source.setHealthCode(HEALTH_CODE);
    source.setAppId(testApp.getIdentifier());
    source.setUpload(testUpload);
    source.setSuccess(false);
    source.addMessage("common message");
    source.setTempDir(mockTempDir);
    source.setDataFile(mockDataFile);
    source.setDecryptedDataFile(mockDecryptedFile);
    source.setUnzippedDataFileMap(unzippedFiles);
    source.setInfoJsonNode(infoNode);
    source.setHealthDataRecord(healthRecord);
    source.setRecordId("test-record");

    // Make the copy and verify every attribute carried over.
    UploadValidationContext duplicate = source.shallowCopy();
    assertEquals(duplicate.getHealthCode(), HEALTH_CODE);
    assertSame(duplicate.getAppId(), testApp.getIdentifier());
    assertSame(duplicate.getUpload(), testUpload);
    assertFalse(duplicate.getSuccess());
    assertSame(duplicate.getTempDir(), mockTempDir);
    assertSame(duplicate.getDataFile(), mockDataFile);
    assertSame(duplicate.getDecryptedDataFile(), mockDecryptedFile);
    assertEquals(duplicate.getUnzippedDataFileMap(), unzippedFiles);
    assertSame(duplicate.getInfoJsonNode(), infoNode);
    assertSame(duplicate.getHealthDataRecord(), healthRecord);
    assertEquals(duplicate.getRecordId(), "test-record");
    assertEquals(duplicate.getMessageList().size(), 1);
    assertEquals(duplicate.getMessageList().get(0), "common message");

    // Mutating the source afterwards must not bleed into the copy.
    source.setHealthCode("new-health-code");
    source.addMessage("original message");
    assertEquals(duplicate.getHealthCode(), HEALTH_CODE);
    assertEquals(duplicate.getMessageList().size(), 1);
    assertEquals(duplicate.getMessageList().get(0), "common message");

    // Likewise, mutating the copy must not bleed back into the source.
    duplicate.setRecordId("new-record-id");
    duplicate.addMessage("copy message");
    assertEquals(source.getRecordId(), "test-record");
    assertEquals(source.getMessageList().size(), 2);
    assertEquals(source.getMessageList().get(0), "common message");
    assertEquals(source.getMessageList().get(1), "original message");
}
Usage of org.sagebionetworks.bridge.models.upload.Upload in the BridgeServer2 project by Sage-Bionetworks: class UploadValidationTaskFactoryTest, method test.
@Test
public void test() {
    // Collaborators to wire into the factory.
    List<UploadValidationHandler> handlerList = Collections.emptyList();
    UploadDao mockDao = mock(UploadDao.class);
    FileHelper fileHelper = new FileHelper();
    HealthDataService healthDataService = new HealthDataService();

    // Wire up the factory under test.
    UploadValidationTaskFactory taskFactory = new UploadValidationTaskFactory();
    taskFactory.setFileHelper(fileHelper);
    taskFactory.setHandlerList(handlerList);
    taskFactory.setUploadDao(mockDao);
    taskFactory.setHealthDataService(healthDataService);

    // Inputs for the task.
    App testApp = TestUtils.getValidApp(UploadValidationTaskFactoryTest.class);
    Upload testUpload = Upload.create();
    testUpload.setHealthCode(HEALTH_CODE);

    // Create a task and verify the context and collaborators were passed through.
    UploadValidationTask task = taskFactory.newTask(testApp.getIdentifier(), testUpload);
    assertEquals(task.getContext().getHealthCode(), HEALTH_CODE);
    assertSame(task.getContext().getAppId(), testApp.getIdentifier());
    assertSame(task.getContext().getUpload(), testUpload);
    assertSame(task.getFileHelper(), fileHelper);
    assertSame(task.getHandlerList(), handlerList);
    assertSame(task.getUploadDao(), mockDao);
    assertSame(task.getHealthDataService(), healthDataService);
}
Aggregations