Use of org.sagebionetworks.bridge.exceptions.ConcurrentModificationException in project BridgeServer2 by Sage-Bionetworks.
Class DynamoCompoundActivityDefinitionDao, method updateCompoundActivityDefinition.
/**
 * {@inheritDoc}
 */
@Override
public CompoundActivityDefinition updateCompoundActivityDefinition(
        CompoundActivityDefinition compoundActivityDefinition) {
    // Currently, both a save expression and a mismatched version number will throw a
    // ConditionalCheckFailedException, so it's not possible to distinguish between an EntityNotFound and a
    // ConcurrentModificationException using a single atomic DDB request. Since ConcurrentModification is more
    // nefarious, we'll use the atomic operation for that and use a load-before-update to check that the def
    // already exists.

    // Call get() to verify the def exists. This will throw an EntityNotFoundException if it doesn't exist.
    String taskId = compoundActivityDefinition.getTaskId();
    getCompoundActivityDefinition(compoundActivityDefinition.getAppId(), taskId);

    // Call DDB mapper to save.
    try {
        mapper.save(compoundActivityDefinition);
    } catch (ConditionalCheckFailedException ex) {
        throw new ConcurrentModificationException(compoundActivityDefinition);
    }

    return compoundActivityDefinition;
}
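The load-before-update pattern above exists because DynamoDBMapper's optimistic locking issues the same conditional write whether the item is missing or its version is stale, so both failures surface as ConditionalCheckFailedException. The following is a minimal, self-contained sketch of that behavior; ExampleDef, ExampleDefUpdater, and the IllegalStateExceptions standing in for Bridge's EntityNotFoundException and ConcurrentModificationException are illustrative, not BridgeServer2 classes.

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBHashKey;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTable;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBVersionAttribute;
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;

@DynamoDBTable(tableName = "ExampleDef")
class ExampleDef {
    private String taskId;
    private Long version;

    @DynamoDBHashKey
    public String getTaskId() { return taskId; }
    public void setTaskId(String taskId) { this.taskId = taskId; }

    // Optimistic locking: every mapper.save() is conditional on this attribute.
    @DynamoDBVersionAttribute
    public Long getVersion() { return version; }
    public void setVersion(Long version) { this.version = version; }
}

class ExampleDefUpdater {
    private final DynamoDBMapper mapper;

    ExampleDefUpdater(DynamoDBMapper mapper) {
        this.mapper = mapper;
    }

    ExampleDef update(ExampleDef def) {
        // Load first: a missing item should surface as "not found" rather than as the
        // ConditionalCheckFailedException the versioned save would also throw for it.
        if (mapper.load(ExampleDef.class, def.getTaskId()) == null) {
            throw new IllegalStateException("ExampleDef " + def.getTaskId() + " does not exist");
        }
        try {
            // The save is conditional on the stored version matching def.getVersion();
            // a stale version means another writer got there first.
            mapper.save(def);
        } catch (ConditionalCheckFailedException ex) {
            throw new IllegalStateException("ExampleDef " + def.getTaskId() + " was modified concurrently", ex);
        }
        return def;
    }
}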
Use of org.sagebionetworks.bridge.exceptions.ConcurrentModificationException in project BridgeServer2 by Sage-Bionetworks.
Class DynamoCompoundActivityDefinitionDao, method createCompoundActivityDefinition.
/**
 * {@inheritDoc}
 */
@Override
public CompoundActivityDefinition createCompoundActivityDefinition(
        CompoundActivityDefinition compoundActivityDefinition) {
    // Currently, all CompoundActivityDefinitions are DynamoCompoundActivityDefinitions.
    DynamoCompoundActivityDefinition ddbDef = (DynamoCompoundActivityDefinition) compoundActivityDefinition;

    // Clear the version. This allows people to copy-paste defs.
    ddbDef.setVersion(null);

    // Call DDB to create.
    try {
        mapper.save(ddbDef);
    } catch (ConditionalCheckFailedException ex) {
        throw new ConcurrentModificationException(ddbDef);
    }

    return ddbDef;
}
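Clearing the version matters because, under DynamoDBMapper's default optimistic locking, a null version makes the save conditional on the version attribute not existing yet: the object is written as a brand-new item, and an attempt to create over an existing item fails the condition. A minimal sketch of that create path, reusing the hypothetical ExampleDef from the previous sketch (ExampleDefCreator and its exception handling are illustrative):

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;

class ExampleDefCreator {
    // Sketch of the create path: a cleared version lets a copy-pasted def be saved as a new
    // item; if an item with the same key already exists, the conditional save fails.
    static ExampleDef create(DynamoDBMapper mapper, String taskId) {
        ExampleDef def = new ExampleDef();
        def.setTaskId(taskId);
        def.setVersion(null); // treat as brand-new, even if the fields were copied from an existing def
        try {
            mapper.save(def);
        } catch (ConditionalCheckFailedException ex) {
            // an item with this taskId already exists; BridgeServer2 surfaces this case as
            // ConcurrentModificationException
            throw new IllegalStateException("ExampleDef " + taskId + " already exists", ex);
        }
        return def;
    }
}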
Use of org.sagebionetworks.bridge.exceptions.ConcurrentModificationException in project BridgeServer2 by Sage-Bionetworks.
Class UploadService, method uploadComplete.
public void uploadComplete(String appId, UploadCompletionClient completedBy, Upload upload, boolean redrive) {
    String uploadId = upload.getUploadId();

    // We don't want to kick off upload validation on an upload that already has upload validation.
    if (!upload.canBeValidated() && !redrive) {
        logger.info(String.format("uploadComplete called for upload %s, which is already complete", uploadId));
        return;
    }

    final String objectId = upload.getObjectId();
    ObjectMetadata obj;
    try {
        Stopwatch stopwatch = Stopwatch.createStarted();
        obj = s3Client.getObjectMetadata(uploadBucket, objectId);
        logger.info("Finished getting S3 metadata for bucket " + uploadBucket + " key " + objectId + " in " +
                stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    } catch (AmazonS3Exception ex) {
        if (ex.getStatusCode() == 404) {
            throw new NotFoundException(ex);
        } else {
            // Only S3 404s are mapped to 404s. Everything else is an internal server error.
            throw new BridgeServiceException(ex);
        }
    }

    String sse = obj.getSSEAlgorithm();
    if (!AES_256_SERVER_SIDE_ENCRYPTION.equals(sse)) {
        logger.error("Missing S3 server-side encryption (SSE) for presigned upload " + uploadId + ".");
    }

    try {
        uploadDao.uploadComplete(completedBy, upload);
    } catch (ConcurrentModificationException ex) {
        // The old workflow is the app calls uploadComplete. The new workflow has an S3 trigger to call
        // uploadComplete. During the transition, it's very likely that this will be called twice, sometimes
        // concurrently. As such, we should log and squelch the ConcurrentModificationException.
        logger.info("Concurrent modification of upload " + uploadId + " while marking upload complete");

        // Also return early, so the duplicate call doesn't kick off validation and export again
        // and create duplicate records.
        return;
    }

    // kick off upload validation
    App app = appService.getApp(appId);
    if (BridgeUtils.isExporter3Configured(app)) {
        exporter3Service.completeUpload(app, upload);
    }

    // For backwards compatibility, always call Legacy Exporter 2.0. In the future, we may introduce a setting to
    // disable this for new apps.
    uploadValidationService.validateUpload(appId, upload);
}
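For the S3 metadata step, only a 404 from S3 becomes a not-found response, any other S3 failure is treated as a server-side error, and the SSE algorithm on the returned metadata is checked against AES-256. A minimal, self-contained sketch of that check; UploadMetadataChecker and the plain runtime exceptions standing in for NotFoundException and BridgeServiceException are illustrative, not BridgeServer2 classes.

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.ObjectMetadata;

class UploadMetadataChecker {
    private final AmazonS3 s3Client;
    private final String uploadBucket;

    UploadMetadataChecker(AmazonS3 s3Client, String uploadBucket) {
        this.s3Client = s3Client;
        this.uploadBucket = uploadBucket;
    }

    boolean isAes256Encrypted(String objectId) {
        ObjectMetadata metadata;
        try {
            metadata = s3Client.getObjectMetadata(uploadBucket, objectId);
        } catch (AmazonS3Exception ex) {
            if (ex.getStatusCode() == 404) {
                // only a missing object maps to "not found"
                throw new IllegalArgumentException("Upload object " + objectId + " not found", ex);
            }
            // every other S3 failure is a server-side error
            throw new RuntimeException("S3 error fetching metadata for " + objectId, ex);
        }
        // getSSEAlgorithm() returns "AES256" for objects stored with SSE-S3 encryption
        return ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION.equals(metadata.getSSEAlgorithm());
    }
}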
Use of org.sagebionetworks.bridge.exceptions.ConcurrentModificationException in project BridgeServer2 by Sage-Bionetworks.
Class DynamoUploadDao, method uploadComplete.
/**
 * {@inheritDoc}
 */
@Override
public void uploadComplete(@Nonnull UploadCompletionClient completedBy, @Nonnull Upload upload) {
    DynamoUpload2 upload2 = (DynamoUpload2) upload;
    upload2.setStatus(UploadStatus.VALIDATION_IN_PROGRESS);

    // TODO: If we globalize Bridge, we'll need to make this timezone configurable.
    upload2.setUploadDate(LocalDate.now(BridgeConstants.LOCAL_TIME_ZONE));
    upload2.setCompletedOn(DateUtils.getCurrentMillisFromEpoch());
    upload2.setCompletedBy(completedBy);

    try {
        mapper.save(upload2);
    } catch (ConditionalCheckFailedException ex) {
        throw new ConcurrentModificationException("Upload " + upload.getUploadId() + " is already complete");
    }
}
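The ConditionalCheckFailedException caught here comes from DynamoDBMapper making the save conditional (for example via optimistic locking on the entity's version attribute). As an illustration only, not the actual DAO, the same "first completer wins" effect can be produced with an explicit DynamoDBSaveExpression; the ConditionalUploadCompleter class, the "status" attribute, and its "requested" value are assumptions for this sketch.

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBSaveExpression;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;
import com.amazonaws.services.dynamodbv2.model.ExpectedAttributeValue;

class ConditionalUploadCompleter {
    private final DynamoDBMapper mapper;

    ConditionalUploadCompleter(DynamoDBMapper mapper) {
        this.mapper = mapper;
    }

    // "upload" stands in for a mapped upload entity such as DynamoUpload2; the "status"
    // attribute and its "requested" value are assumed for this sketch.
    void markComplete(Object upload, String uploadId) {
        DynamoDBSaveExpression onlyIfStillRequested = new DynamoDBSaveExpression()
                .withExpectedEntry("status", new ExpectedAttributeValue(
                        new AttributeValue().withS("requested")));
        try {
            // The save succeeds only if the upload is still in the expected state.
            mapper.save(upload, onlyIfStillRequested);
        } catch (ConditionalCheckFailedException ex) {
            // another caller already completed the upload
            throw new IllegalStateException("Upload " + uploadId + " is already complete", ex);
        }
    }
}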
Use of org.sagebionetworks.bridge.exceptions.ConcurrentModificationException in project BridgeServer2 by Sage-Bionetworks.
Class DynamoSurveyDaoTest, method createSurveyConditionalCheckFailedException.
@SuppressWarnings("unchecked")
@Test
public void createSurveyConditionalCheckFailedException() {
    DynamoSurvey survey = new DynamoSurvey();
    survey.setAppId(TEST_APP_ID);
    mockSurveyMapper(survey);
    mockSurveyElementMapper();
    when(mockSurveyElementMapper.query(eq(DynamoSurveyElement.class), any())).thenReturn(mockElementQueryList);
    doThrow(new ConditionalCheckFailedException("")).when(mockSurveyMapper).save(any());

    try {
        dao.createSurvey(survey);
        fail("Should have thrown exceptions");
    } catch (ConcurrentModificationException e) {
        verify(mockSurveyElementMapper, times(2)).batchDelete(any(List.class));
    }
}
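This test drives createSurvey into the ConditionalCheckFailedException path and checks that the survey elements written before the failed save are batch-deleted. A sketch of the same check written with assertThrows, assuming the test class uses a TestNG version that provides org.testng.Assert.assertThrows and the same mocks and helper methods as above (the method name is hypothetical):

@SuppressWarnings("unchecked")
@Test
public void createSurveyConditionalCheckFailedExceptionAlt() {
    DynamoSurvey survey = new DynamoSurvey();
    survey.setAppId(TEST_APP_ID);
    mockSurveyMapper(survey);
    mockSurveyElementMapper();
    when(mockSurveyElementMapper.query(eq(DynamoSurveyElement.class), any())).thenReturn(mockElementQueryList);
    doThrow(new ConditionalCheckFailedException("")).when(mockSurveyMapper).save(any());

    // Assert the exception directly, then verify cleanup of the partially written elements.
    assertThrows(ConcurrentModificationException.class, () -> dao.createSurvey(survey));
    verify(mockSurveyElementMapper, times(2)).batchDelete(any(List.class));
}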