Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class DatasetPage, method isLockedFromEdits.
/**
 * Reports whether the current user is locked out of editing this dataset.
 * While a dataset is in review, curators may still edit but authors may not;
 * any other lock type blocks editing for all editors.
 *
 * The answer is computed once per page view and cached in lockedFromEditsVar.
 *
 * @return true when the current request is not permitted to edit the dataset
 */
public boolean isLockedFromEdits() {
    if (lockedFromEditsVar == null) {
        boolean editBlocked;
        try {
            // Throws IllegalCommandException when a lock forbids this user from editing.
            permissionService.checkEditDatasetLock(dataset, dvRequestService.getDataverseRequest(),
                    new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest()));
            editBlocked = false;
        } catch (IllegalCommandException ex) {
            editBlocked = true;
        }
        lockedFromEditsVar = editBlocked;
    }
    return lockedFromEditsVar;
}
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class DatasetPage, method save.
/**
 * JSF action that saves the working dataset version.
 *
 * Steps: validate constraint violations on the working version; submit either a
 * CreateDatasetCommand (editMode == CREATE) or a lenient UpdateDatasetCommand;
 * notify the user on create; post a success message keyed to the edit mode (or
 * to the bulk-file operation in progress); finally queue ingest jobs.
 *
 * @return "" on validation failure (redisplay with messages), null when an
 *         unauthenticated user tries to create from a template, otherwise the
 *         draft-version page outcome (also used after command failures).
 */
public String save() {
// Validate
Set<ConstraintViolation> constraintViolations = workingVersion.validate();
if (!constraintViolations.isEmpty()) {
// JsfHelper.addFlashMessage(JH.localize("dataset.message.validationError"));
JH.addMessage(FacesMessage.SEVERITY_ERROR, JH.localize("dataset.message.validationError"));
// FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", "See below for details."));
// Empty outcome: stay on the page so the validation messages are shown.
return "";
}
// Use the API to save the dataset:
Command<Dataset> cmd;
try {
if (editMode == EditMode.CREATE) {
// Creating from a template is restricted to authenticated users.
if (selectedTemplate != null) {
if (isSessionUserAuthenticated()) {
cmd = new CreateDatasetCommand(dataset, dvRequestService.getDataverseRequest(), false, null, selectedTemplate);
} else {
JH.addMessage(FacesMessage.SEVERITY_FATAL, JH.localize("dataset.create.authenticatedUsersOnly"));
return null;
}
} else {
cmd = new CreateDatasetCommand(dataset, dvRequestService.getDataverseRequest());
}
} else {
// Any non-CREATE mode (metadata/license/file edits, bulk operations) is an update.
cmd = new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest(), filesToBeDeleted);
// Lenient validation: tolerate pre-existing invalid values not touched by this edit.
((UpdateDatasetCommand) cmd).setValidateLenient(true);
}
dataset = commandEngine.submit(cmd);
if (editMode == EditMode.CREATE) {
if (session.getUser() instanceof AuthenticatedUser) {
userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), dataset.getCreateDate(), UserNotification.Type.CREATEDS, dataset.getLatestVersion().getId());
}
}
logger.fine("Successfully executed SaveDatasetCommand.");
} catch (EJBException ex) {
// Unwrap the full cause chain into one string for the fine-level log.
StringBuilder error = new StringBuilder();
error.append(ex).append(" ");
error.append(ex.getMessage()).append(" ");
Throwable cause = ex;
while (cause.getCause() != null) {
cause = cause.getCause();
error.append(cause).append(" ");
error.append(cause.getMessage()).append(" ");
}
// The doubled apostrophe is MessageFormat escaping for a literal quote.
logger.log(Level.FINE, "Couldn''t save dataset: {0}", error.toString());
populateDatasetUpdateFailureMessage();
return returnToDraftVersion();
} catch (CommandException ex) {
// FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
logger.severe("CommandException, when attempting to update the dataset: " + ex.getMessage());
populateDatasetUpdateFailureMessage();
return returnToDraftVersion();
}
newFiles.clear();
// Pick the success message matching the kind of edit that was just saved.
if (editMode != null) {
if (editMode.equals(EditMode.CREATE)) {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.createSuccess"));
}
if (editMode.equals(EditMode.METADATA)) {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.metadataSuccess"));
}
if (editMode.equals(EditMode.LICENSE)) {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.termsSuccess"));
}
if (editMode.equals(EditMode.FILE)) {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.filesSuccess"));
}
} else {
// must have been a bulk file update or delete:
if (bulkFileDeleteInProgress) {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.bulkFileDeleteSuccess"));
} else {
JsfHelper.addSuccessMessage(JH.localize("dataset.message.bulkFileUpdateSuccess"));
}
}
// Reset per-save state before navigating away.
editMode = null;
bulkFileDeleteInProgress = false;
// Call Ingest Service one more time, to
// queue the data ingest jobs for asynchronous execution:
// NOTE(review): this cast assumes the saving user is authenticated — a
// GuestUser here would throw ClassCastException; confirm callers guarantee this.
ingestService.startIngestJobs(dataset, (AuthenticatedUser) session.getUser());
logger.fine("Redirecting to the Dataset page.");
return returnToDraftVersion();
}
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class CollectionListManagerImpl, method listCollectionContents.
/**
 * SWORD "list collection contents" endpoint: builds an Atom feed of the
 * datasets in the requested dataverse that the authenticated user is allowed
 * to update (one entry per editable dataset, with edit and edit-media links).
 *
 * @param iri the collection IRI; its target identifier must be a dataverse alias
 * @return an Atom Feed of editable datasets, tagged with the dataverse's release state
 * @throws SwordError on bad/unknown alias, unparseable URL, or insufficient permission
 */
@Override
public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordAuthException, SwordError {
AuthenticatedUser user = swordAuth.auth(authCredentials);
DataverseRequest dvReq = new DataverseRequest(user, request);
// Parse the IRI to determine what object it addresses.
urlManager.processUrl(iri.toString());
String dvAlias = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("dataverse") && dvAlias != null) {
Dataverse dv = dataverseService.findByAlias(dvAlias);
if (dv != null) {
/**
 * We'll say having AddDataset is enough to use this API
 * endpoint, which means you are a Contributor to that
 * dataverse. If we let just anyone call this endpoint, they
 * will be able to see if the supplied dataverse is published or
 * not.
 */
if (!permissionService.requestOn(dvReq, dv).has(Permission.AddDataset)) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to list datasets in dataverse " + dv.getAlias());
}
Abdera abdera = new Abdera();
Feed feed = abdera.newFeed();
feed.setTitle(dv.getName());
String baseUrl = urlManager.getHostnamePlusBaseUrlPath(iri.toString());
List<Dataset> datasets = datasetService.findByOwnerId(dv.getId());
for (Dataset dataset : datasets) {
/**
 * @todo Will this be performant enough with production
 * data, say in the root dataverse? Remove this todo if
 * there are no complaints. :)
 */
// Only list datasets the user could actually update via SWORD.
if (!permissionService.isUserAllowedOn(user, new UpdateDatasetCommand(dataset, dvReq), dataset)) {
continue;
}
String editUri = baseUrl + "/edit/study/" + dataset.getGlobalId();
String editMediaUri = baseUrl + "/edit-media/study/" + dataset.getGlobalId();
Entry entry = feed.addEntry();
entry.setId(editUri);
entry.setTitle(datasetService.getTitleFromLatestVersion(dataset.getId()));
entry.setBaseUri(new IRI(editUri));
entry.addLink(editMediaUri, "edit-media");
feed.addEntry(entry);
}
// Expose the dataverse's release state as a SWORD extension element.
Boolean dvHasBeenReleased = dv.isReleased();
feed.addSimpleExtension(new QName(UriRegistry.SWORD_STATE, "dataverseHasBeenReleased"), dvHasBeenReleased.toString());
return feed;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifer from URL: " + iri);
}
}
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class MediaResourceManagerImpl, method replaceOrAddFiles.
/**
 * SWORD media-resource deposit: adds files from an uploaded SimpleZip package
 * to an existing dataset and persists the change with an UpdateDatasetCommand.
 *
 * Pipeline: authenticate; resolve the dataset from the URL's global id; check
 * UpdateDataset permission; reject rsync-configured installations and any
 * packaging other than SimpleZip; create DataFiles from the zip stream;
 * validate the edit version; submit the update command; start ingest jobs.
 *
 * NOTE(review): the shouldReplace parameter is not referenced anywhere in this
 * body — confirm replace-vs-add semantics are handled by the caller.
 *
 * @return a DepositReceipt describing the updated dataset
 * @throws SwordError for any validation, permission, packaging, or command failure
 */
DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration, boolean shouldReplace) throws SwordError, SwordAuthException, SwordServerException {
AuthenticatedUser user = swordAuth.auth(authCredentials);
DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
urlManager.processUrl(uri);
String globalId = urlManager.getTargetIdentifier();
if (urlManager.getTargetType().equals("study") && globalId != null) {
logger.fine("looking up dataset with globalId " + globalId);
Dataset dataset = datasetService.findByGlobalId(globalId);
if (dataset == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset with global ID of " + globalId);
}
// The same command instance is used first for the permission check, then submitted below.
UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataset with global ID " + dataset.getGlobalId());
}
// -------------------------------------
// Installations configured for rsync uploads do not accept SWORD file deposits.
if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". Please use rsync file upload.");
}
/**
 * @todo decide if we want non zip files to work. Technically, now
 * that we're letting ingestService.createDataFiles unpack the zip
 * for us, the following *does* work:
 *
 * curl--data-binary @path/to/trees.png -H "Content-Disposition:
 * filename=trees.png" -H "Content-Type: image/png" -H "Packaging:
 * http://purl.org/net/sword/package/SimpleZip"
 *
 * We *might* want to continue to force API users to only upload zip
 * files so that some day we can support a including a file or files
 * that contain the metadata (i.e. description) for each file in the
 * zip: https://github.com/IQSS/dataverse/issues/723
 */
if (!deposit.getPackaging().equals(UriRegistry.PACKAGE_SIMPLE_ZIP)) {
throw new SwordError(UriRegistry.ERROR_CONTENT, 415, "Package format " + UriRegistry.PACKAGE_SIMPLE_ZIP + " is required but format specified in 'Packaging' HTTP header was " + deposit.getPackaging());
}
String uploadedZipFilename = deposit.getFilename();
DatasetVersion editVersion = dataset.getEditVersion();
if (deposit.getInputStream() == null) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Deposit input stream was null.");
}
// Cheap sanity check that the deposit is non-empty before attempting to unpack it.
int bytesAvailableInInputStream = 0;
try {
bytesAvailableInInputStream = deposit.getInputStream().available();
} catch (IOException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine number of bytes available in input stream: " + ex);
}
if (bytesAvailableInInputStream == 0) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Bytes available in input stream was " + bytesAvailableInInputStream + ". Please check the file you are attempting to deposit.");
}
/**
 * @todo Think about if we should instead pass in "application/zip"
 * rather than letting ingestService.createDataFiles() guess the
 * contentType by passing it "null". See also the note above about
 * SimpleZip vs. other contentTypes.
 */
String guessContentTypeForMe = null;
List<DataFile> dataFiles = new ArrayList<>();
try {
try {
dataFiles = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, systemConfig);
} catch (EJBException ex) {
// Translate container exceptions into SWORD errors the client can act on.
Throwable cause = ex.getCause();
if (cause != null) {
if (cause instanceof IllegalArgumentException) {
/**
 * @todo should be safe to remove this catch of
 * EJBException and IllegalArgumentException once
 * this ticket is resolved:
 *
 * IllegalArgumentException: MALFORMED when
 * uploading certain zip files
 * https://github.com/IQSS/dataverse/issues/1021
 */
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause);
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage());
}
}
/*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage());
//Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex);
}*/
} catch (IOException ex) {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage());
}
if (!dataFiles.isEmpty()) {
// Report only the first violation; one bad file fails the whole deposit.
Set<ConstraintViolation> constraintViolations = editVersion.validate();
if (constraintViolations.size() > 0) {
ConstraintViolation violation = constraintViolations.iterator().next();
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + violation.getMessage() + " The invalid value was \"" + violation.getInvalidValue() + "\".");
} else {
ingestService.addFiles(editVersion, dataFiles);
}
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No files to add to dataset. Perhaps the zip file was empty.");
}
try {
dataset = commandEngine.submit(updateDatasetCommand);
} catch (CommandException ex) {
throw returnEarly("Couldn't update dataset " + ex);
} catch (EJBException ex) {
/**
 * @todo stop bothering to catch an EJBException once this has
 * been implemented:
 *
 * Have commands catch ConstraintViolationException and turn
 * them into something that inherits from CommandException ยท
 * https://github.com/IQSS/dataverse/issues/1009
 */
// Walk the cause chain, expanding any constraint violations into readable detail.
Throwable cause = ex;
StringBuilder sb = new StringBuilder();
sb.append(ex.getLocalizedMessage());
while (cause.getCause() != null) {
cause = cause.getCause();
sb.append(cause + " ");
if (cause instanceof ConstraintViolationException) {
ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
sb.append(" Invalid value \"").append(violation.getInvalidValue()).append("\" for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
}
}
}
throw returnEarly("EJBException: " + sb.toString());
}
// Queue asynchronous ingest for the newly added files.
ingestService.startIngestJobs(dataset, user);
ReceiptGenerator receiptGenerator = new ReceiptGenerator();
String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
return depositReceipt;
} else {
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type or identifier from URL: " + uri);
}
}
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class MediaResourceManagerImpl, method deleteMediaResource.
/**
 * SWORD endpoint that deletes a single data file from a dataset.
 *
 * Resolves the numeric file id from the URL, checks that the user may run
 * UpdateDatasetCommand on the owning dataset, and submits the command with
 * the file marked for deletion.
 *
 * Cleanups vs. previous version: removed the duplicate
 * {@code dataset}/{@code datasetThatOwnsFile} variables (both were
 * {@code fileToDelete.getOwner()}), the unreachable {@code fileIdLong != null}
 * branch (Long.valueOf never returns null), the redundant second
 * {@code getTargetIdentifier()} call, and fixed the "identifer" /
 * "Could not file file" typos in error messages.
 *
 * @throws SwordError on auth failure, malformed URL, non-numeric or unknown
 *         file id, insufficient permission, or command failure
 */
@Override
public void deleteMediaResource(String uri, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    urlManager.processUrl(uri);
    String targetType = urlManager.getTargetType();
    String fileId = urlManager.getTargetIdentifier();
    // Guard clauses: fail fast on anything that is not a well-formed file URL.
    if (targetType == null || fileId == null) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifier not specified in URL: " + uri);
    }
    if (!"file".equals(targetType)) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported file type found in URL: " + uri);
    }
    Long fileIdLong;
    try {
        fileIdLong = Long.valueOf(fileId);
    } catch (NumberFormatException ex) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "File id must be a number, not '" + fileId + "'. URL was: " + uri);
    }
    logger.fine("preparing to delete file id " + fileIdLong);
    DataFile fileToDelete = dataFileService.find(fileIdLong);
    if (fileToDelete == null) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find file id " + fileIdLong + " from URL: " + uri);
    }
    Dataset datasetThatOwnsFile = fileToDelete.getOwner();
    Dataverse dataverseThatOwnsFile = datasetThatOwnsFile.getOwner();
    /**
     * @todo it would be nice to have this check higher
     * up. Do we really need the file ID? Should the
     * last argument to isUserAllowedOn be changed from
     * "dataset" to "fileToDelete"?
     */
    // Passing the file to the command marks it for deletion on submit.
    UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(datasetThatOwnsFile, dvReq, fileToDelete);
    if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, datasetThatOwnsFile)) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify " + dataverseThatOwnsFile.getAlias());
    }
    try {
        commandEngine.submit(updateDatasetCommand);
    } catch (CommandException ex) {
        throw SwordUtil.throwSpecialSwordErrorWithoutStackTrace(UriRegistry.ERROR_BAD_REQUEST, "Could not delete file: " + ex);
    }
}
Aggregations