Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class ContainerManagerImpl, method replaceMetadata.
@Override
public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    logger.fine("replaceMetadata called with url: " + uri);
    urlManager.processUrl(uri);
    String targetType = urlManager.getTargetType();
    if (!targetType.isEmpty()) {
        logger.fine("operating on target type: " + urlManager.getTargetType());
        if ("dataverse".equals(targetType)) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Metadata replace of dataverse is not supported.");
        } else if ("study".equals(targetType)) {
            logger.fine("replacing metadata for dataset");
            // do a sanity check on the XML received
            try {
                SwordEntry swordEntry = deposit.getSwordEntry();
                logger.fine("deposit XML received by replaceMetadata():\n" + swordEntry);
            } catch (ParseException ex) {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can not replace dataset metadata due to malformed Atom entry: " + ex);
            }
            String globalId = urlManager.getTargetIdentifier();
            Dataset dataset = datasetService.findByGlobalId(globalId);
            if (dataset != null) {
                Dataverse dvThatOwnsDataset = dataset.getOwner();
                UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
                if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
                }
                DatasetVersion datasetVersion = dataset.getEditVersion();
                // erase all metadata before populating the dataset version
                List<DatasetField> emptyDatasetFields = new ArrayList<>();
                datasetVersion.setDatasetFields(emptyDatasetFields);
                String foreignFormat = SwordUtil.DCTERMS;
                try {
                    importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, datasetVersion);
                } catch (Exception ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
                }
                swordService.addDatasetContact(datasetVersion, user);
                swordService.addDatasetDepositor(datasetVersion, user);
                swordService.addDatasetSubjectIfMissing(datasetVersion);
                swordService.setDatasetLicenseAndTermsOfUse(datasetVersion, deposit.getSwordEntry());
                try {
                    engineSvc.submit(updateDatasetCommand);
                } catch (CommandException ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem updating dataset: " + ex);
                }
                ReceiptGenerator receiptGenerator = new ReceiptGenerator();
                String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
                DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
                return depositReceipt;
            } else {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on global id (" + globalId + ") in URL: " + uri);
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unknown target type specified on which to replace metadata: " + uri);
        }
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target specified on which to replace metadata: " + uri);
    }
}
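The heart of replaceMetadata() is the wipe-and-reimport step: every existing metadata field is cleared before the DCTERMS payload from the Atom entry is imported into the edit version. Below is a minimal sketch of that step in isolation, assuming the same injected importGenericService shown above; the helper name replaceVersionMetadata is hypothetical.

// Sketch of the wipe-and-reimport pattern from replaceMetadata().
// Assumes the injected importGenericService from the snippet above;
// replaceVersionMetadata is a hypothetical helper name.
private void replaceVersionMetadata(DatasetVersion datasetVersion, String atomEntryXml) throws SwordError {
    // erase all existing metadata so the import starts from a clean slate
    datasetVersion.setDatasetFields(new ArrayList<DatasetField>());
    try {
        // SwordUtil.DCTERMS identifies the foreign metadata format being imported
        importGenericService.importXML(atomEntryXml, SwordUtil.DCTERMS, datasetVersion);
    } catch (Exception ex) {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
    }
}

Note that the wipe is unconditional: a deposit that omits a field removes it from the draft version rather than leaving the old value in place.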
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class FilePage, method isLockedFromEdits.
/**
 * Authors are not allowed to edit a dataset while it is in review, but
 * curators are. For all other locks, editing is blocked for all editors.
 */
public boolean isLockedFromEdits() {
    if (null == dataset) {
        dataset = fileMetadata.getDataFile().getOwner();
    }
    if (null == lockedFromEditsVar) {
        try {
            permissionService.checkEditDatasetLock(dataset, dvRequestService.getDataverseRequest(), new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest()));
            lockedFromEditsVar = false;
        } catch (IllegalCommandException ex) {
            lockedFromEditsVar = true;
        }
    }
    return lockedFromEditsVar;
}
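checkEditDatasetLock signals a blocking lock by throwing IllegalCommandException rather than returning a flag, so the page bean converts the exception into a cached boolean. Here is a minimal sketch of that probe as a reusable helper, assuming the same injected permissionService and dvRequestService used above; the name isEditBlockedByLock is hypothetical.

// Sketch: convert the exception-based lock check into a boolean.
// Assumes the injected permissionService and dvRequestService shown above;
// isEditBlockedByLock is a hypothetical helper name.
private boolean isEditBlockedByLock(Dataset dataset) {
    try {
        // throws IllegalCommandException when a lock prevents this user from editing
        permissionService.checkEditDatasetLock(dataset, dvRequestService.getDataverseRequest(),
                new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest()));
        return false;
    } catch (IllegalCommandException ex) {
        return true;
    }
}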
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class AddReplaceFileHelper, method step_070_run_update_dataset_command.
/**
 * Create and run the update dataset command.
 *
 * @return true if the update succeeded; false if an error was recorded
 */
private boolean step_070_run_update_dataset_command() {
    if (this.hasError()) {
        return false;
    }
    Command<Dataset> update_cmd;
    update_cmd = new UpdateDatasetCommand(dataset, dvRequest);
    ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
    try {
        // Submit the update dataset command
        // and update the local dataset object
        dataset = commandEngine.submit(update_cmd);
    } catch (CommandException ex) {
        /**
         * @todo Add a test to exercise this error.
         */
        this.addErrorSevere(getBundleErr("add.add_file_error"));
        logger.severe(ex.getMessage());
        return false;
    } catch (EJBException ex) {
        /**
         * @todo Add a test to exercise this error.
         */
        this.addErrorSevere("add.add_file_error (see logs)");
        logger.severe(ex.getMessage());
        return false;
    }
    return true;
}
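The construct-set-lenient-submit sequence above recurs in the page beans below (EditDatafilesPage.save() and DatasetPage.registerDataset()). A sketch of it as a standalone helper, assuming the injected commandEngine shown above; submitLenientUpdate is a hypothetical name, and CommandException is left to the caller.

// Sketch: build an UpdateDatasetCommand with lenient validation and submit it,
// returning the refreshed Dataset. Hypothetical helper; assumes the injected
// commandEngine shown above.
private Dataset submitLenientUpdate(Dataset dataset, DataverseRequest dvRequest) throws CommandException {
    UpdateDatasetCommand updateCmd = new UpdateDatasetCommand(dataset, dvRequest);
    // lenient validation relaxes strict metadata checks so file-level edits can
    // proceed (assumption based on how setValidateLenient is used in these snippets)
    updateCmd.setValidateLenient(true);
    return commandEngine.submit(updateCmd);
}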
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class EditDatafilesPage, method save.
public String save() {
    if (!saveEnabled) {
        return "";
    }
    if (isFileReplaceOperation()) {
        try {
            return saveReplacementFile();
        } catch (FileReplaceException ex) {
            String errMsg = ex.getMessage();
            FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", errMsg));
            logger.log(Level.SEVERE, "Dataset save failed for replace operation: {0}", errMsg);
            return null;
        }
    }
    // Save the NEW files permanently:
    ingestService.addFiles(workingVersion, newFiles);
    if (workingVersion.getId() == null || datasetUpdateRequired) {
        logger.fine("issuing the dataset update command");
        if (datasetUpdateRequired) {
            for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) {
                for (FileMetadata fileMetadata : fileMetadatas) {
                    if (fileMetadata.getDataFile().getStorageIdentifier() != null) {
                        if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) {
                            workingVersion.getFileMetadatas().set(i, fileMetadata);
                        }
                    }
                }
            }
            if (tabularDataTagsUpdated) {
                for (int i = 0; i < dataset.getFiles().size(); i++) {
                    for (FileMetadata fileMetadata : fileMetadatas) {
                        if (fileMetadata.getDataFile().getStorageIdentifier() != null) {
                            if (fileMetadata.getDataFile().getStorageIdentifier().equals(dataset.getFiles().get(i).getStorageIdentifier())) {
                                dataset.getFiles().set(i, fileMetadata.getDataFile());
                            }
                        }
                    }
                }
                tabularDataTagsUpdated = false;
            }
        }
        Command<Dataset> cmd;
        try {
            cmd = new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest(), filesToBeDeleted);
            ((UpdateDatasetCommand) cmd).setValidateLenient(true);
            dataset = commandEngine.submit(cmd);
        } catch (EJBException ex) {
            StringBuilder error = new StringBuilder();
            error.append(ex).append(" ");
            error.append(ex.getMessage()).append(" ");
            Throwable cause = ex;
            while (cause.getCause() != null) {
                cause = cause.getCause();
                error.append(cause).append(" ");
                error.append(cause.getMessage()).append(" ");
            }
            logger.log(Level.INFO, "Couldn''t save dataset: {0}", error.toString());
            populateDatasetUpdateFailureMessage();
            return null;
        } catch (CommandException ex) {
            FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
            logger.severe(ex.getMessage());
            populateDatasetUpdateFailureMessage();
            return null;
        }
        datasetUpdateRequired = false;
        saveEnabled = false;
    } else {
        // This is an existing Draft version (and nobody has explicitly
        // requested that the entire dataset is updated). So we'll try to update
        // only the filemetadatas and/or files affected, and not the
        // entire version.
        // TODO: in 4.3, create SaveDataFileCommand!
        // -- L.A. Sep. 21 2015, 4.2
        Timestamp updateTime = new Timestamp(new Date().getTime());
        workingVersion.setLastUpdateTime(updateTime);
        dataset.setModificationTime(updateTime);
        StringBuilder saveError = new StringBuilder();
        for (FileMetadata fileMetadata : fileMetadatas) {
            if (fileMetadata.getDataFile().getCreateDate() == null) {
                fileMetadata.getDataFile().setCreateDate(updateTime);
                fileMetadata.getDataFile().setCreator((AuthenticatedUser) session.getUser());
            }
            fileMetadata.getDataFile().setModificationTime(updateTime);
            try {
                // DataFile savedDatafile = datafileService.save(fileMetadata.getDataFile());
                fileMetadata = datafileService.mergeFileMetadata(fileMetadata);
                logger.fine("Successfully saved DataFile " + fileMetadata.getLabel() + " in the database.");
            } catch (EJBException ex) {
                saveError.append(ex).append(" ");
                saveError.append(ex.getMessage()).append(" ");
                Throwable cause = ex;
                while (cause.getCause() != null) {
                    cause = cause.getCause();
                    saveError.append(cause).append(" ");
                    saveError.append(cause.getMessage()).append(" ");
                }
            }
        }
        // Remove / delete any files that were removed
        for (FileMetadata fmd : filesToBeDeleted) {
            // check if this file is being used as the default thumbnail
            if (fmd.getDataFile().equals(dataset.getThumbnailFile())) {
                logger.fine("deleting the dataset thumbnail designation");
                dataset.setThumbnailFile(null);
            }
            if (!fmd.getDataFile().isReleased()) {
                // if the file is a draft (i.e. new to this version), delete it; otherwise just remove the filemetadata object
                try {
                    commandEngine.submit(new DeleteDataFileCommand(fmd.getDataFile(), dvRequestService.getDataverseRequest()));
                    dataset.getFiles().remove(fmd.getDataFile());
                    workingVersion.getFileMetadatas().remove(fmd);
                    // todo: clean this up some when we clean the create / update dataset methods
                    for (DataFileCategory cat : dataset.getCategories()) {
                        cat.getFileMetadatas().remove(fmd);
                    }
                } catch (CommandException cmde) {
                    // TODO:
                    // add diagnostics reporting for individual data files that
                    // we failed to delete.
                }
            } else {
                datafileService.removeFileMetadata(fmd);
                fmd.getDataFile().getFileMetadatas().remove(fmd);
                workingVersion.getFileMetadatas().remove(fmd);
            }
        }
        String saveErrorString = saveError.toString();
        if (saveErrorString != null && !saveErrorString.isEmpty()) {
            logger.log(Level.INFO, "Couldn''t save dataset: {0}", saveErrorString);
            populateDatasetUpdateFailureMessage();
            return null;
        }
        // refresh the dataset object (checking the id for null, just in case)
        if (mode == FileEditMode.UPLOAD) {
            if (dataset.getId() != null) {
                dataset = datasetService.find(dataset.getId());
            }
        }
    }
    newFiles.clear();
    workingVersion = dataset.getEditVersion();
    logger.fine("working version id: " + workingVersion.getId());
    if (mode == FileEditMode.SINGLE) {
        JsfHelper.addSuccessMessage(getBundleString("file.message.editSuccess"));
    } else {
        JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess"));
    }
    // queue the data ingest jobs for asynchronous execution:
    if (mode == FileEditMode.UPLOAD) {
        ingestService.startIngestJobs(dataset, (AuthenticatedUser) session.getUser());
    }
    if (mode == FileEditMode.SINGLE && fileMetadatas.size() > 0) {
        // If this was a "single file edit", i.e. an edit request sent from
        // the individual File Landing page, we want to redirect back to
        // the landing page. BUT ONLY if the file still exists - i.e., if
        // the user hasn't just deleted it!
        versionString = "DRAFT";
        return returnToFileLandingPage();
    }
    // if (newDraftVersion) {
    //     return returnToDraftVersionById();
    // }
    logger.fine("Redirecting to the dataset page, from the edit/upload page.");
    return returnToDraftVersion();
}
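The nested loops near the top of save() merge edited FileMetadata objects back into the working version by matching DataFile storage identifiers. Extracted on its own, the pattern looks like the sketch below; syncFileMetadatas is a hypothetical name, and only accessors already used above are assumed.

// Sketch of the storage-identifier matching from save(): replace each
// FileMetadata in the working version with the edited copy whose DataFile
// carries the same storage identifier.
private void syncFileMetadatas(DatasetVersion workingVersion, List<FileMetadata> editedMetadatas) {
    for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) {
        String targetId = workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier();
        for (FileMetadata edited : editedMetadatas) {
            String editedId = edited.getDataFile().getStorageIdentifier();
            if (editedId != null && editedId.equals(targetId)) {
                workingVersion.getFileMetadatas().set(i, edited);
            }
        }
    }
}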
Use of edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand in project dataverse by IQSS.
The class DatasetPage, method registerDataset.
public String registerDataset() {
    UpdateDatasetCommand cmd;
    try {
        cmd = new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest());
        cmd.setValidateLenient(true);
        dataset = commandEngine.submit(cmd);
    } catch (CommandException ex) {
        FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_WARN, "Dataset Registration Failed", " - " + ex.toString()));
        logger.severe(ex.getMessage());
    }
    FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, "DatasetRegistered", "Your dataset is now registered.");
    FacesContext.getCurrentInstance().addMessage(null, message);
    return returnToDatasetOnly();
}
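registerDataset() is the same lenient-update sequence again, differing from step_070_run_update_dataset_command() only in how failures are surfaced (a JSF warning message instead of an error list). With the hypothetical submitLenientUpdate helper sketched earlier, its try block would reduce to:

try {
    dataset = submitLenientUpdate(dataset, dvRequestService.getDataverseRequest());
} catch (CommandException ex) {
    FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_WARN, "Dataset Registration Failed", " - " + ex.toString()));
    logger.severe(ex.getMessage());
}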