Use of edu.harvard.iq.dataverse.engine.command.exception.CommandException in project dataverse by IQSS.
The class UpdateDatasetTargetURLCommand, method executeImpl.
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
        throw new PermissionException("Update Target URL can only be called by superusers.",
                this, Collections.singleton(Permission.EditDataset), target);
    }
    IdServiceBean idServiceBean = IdServiceBean.getBean(target.getProtocol(), ctxt);
    HashMap<String, String> metadata = idServiceBean.getMetadataFromDatasetForTargetURL(target);
    try {
        String doiRetString = idServiceBean.modifyIdentifier(target, metadata);
        if (doiRetString != null && doiRetString.contains(target.getIdentifier())) {
            target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
            ctxt.em().merge(target);
            ctxt.em().flush();
        } else {
            // do nothing - we'll know it failed because the global id create time won't have been updated.
        }
    } catch (Exception e) {
        // do nothing - as above, the problem has been logged
    }
}
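For context, here is a minimal caller-side sketch of how a command like this is typically submitted and how the resulting CommandException surfaces. It is not taken from the Dataverse source; the two-argument constructor and the injected commandEngine, dvRequestService, and logger fields are assumptions.

// Hypothetical caller sketch (not from the Dataverse source); the constructor
// arguments and the injected commandEngine/dvRequestService are assumptions.
try {
    commandEngine.submit(new UpdateDatasetTargetURLCommand(dataset, dvRequestService.getDataverseRequest()));
} catch (PermissionException pe) {
    // thrown by executeImpl() above when the caller is not an authenticated superuser
    logger.log(Level.WARNING, "Target URL update rejected: {0}", pe.getMessage());
} catch (CommandException ce) {
    logger.log(Level.WARNING, "Target URL update failed: {0}", ce.getMessage());
}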
Use of edu.harvard.iq.dataverse.engine.command.exception.CommandException in project dataverse by IQSS.
The class UpdateDataverseThemeCommand, method execute.
/**
 * Update Theme and Widget related data for this dataverse, and
 * do file management needed for theme images.
 *
 * @param ctxt the command context
 * @return the updated dataverse
 * @throws CommandException if the logo file cannot be saved
 */
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
    // Get current dataverse, so we can delete current logo file if necessary
    Dataverse currentDv = ctxt.dataverses().find(editedDv.getId());
    File logoFileDir = new File(logoPath.toFile(), editedDv.getId().toString());
    File currentFile = null;
    if (currentDv.getDataverseTheme() != null && currentDv.getDataverseTheme().getLogo() != null) {
        currentFile = new File(logoFileDir, currentDv.getDataverseTheme().getLogo());
    }
    try {
        if (editedDv.getDataverseTheme() == null || editedDv.getDataverseTheme().getLogo() == null) {
            // If the edited logo field is empty and a logo file currently exists, delete it
            if (currentFile != null) {
                currentFile.delete();
            }
        } else if (uploadedFile != null) {
            // If the edited logo field isn't empty and an uploaded file exists,
            // delete currentFile and copy the uploaded file from the temp dir to the logos dir
            File newFile = new File(logoFileDir, editedDv.getDataverseTheme().getLogo());
            if (currentFile != null) {
                currentFile.delete();
            }
            logoFileDir.mkdirs();
            Files.copy(uploadedFile.toPath(), newFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
        // save updated dataverse to db
        return ctxt.dataverses().save(editedDv);
    } catch (IOException e) {
        // TODO: improve error handling
        throw new CommandException("Error saving logo file", e, this);
    }
}
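The catch block above carries an "improve error handling" note. One hedged way to tighten the file handling, shown here as a sketch under the assumption that the same fields (logoPath, editedDv, uploadedFile, currentFile) are available, is to use java.nio.file calls that raise IOException for the existing catch block instead of File methods that silently return false. This is not the project's implementation.

// Sketch only, assuming the same fields as the command above; java.nio.file.Files
// surfaces delete/mkdir/copy failures as IOException instead of boolean returns.
Path logoDir = logoPath.resolve(editedDv.getId().toString());
if (editedDv.getDataverseTheme() == null || editedDv.getDataverseTheme().getLogo() == null) {
    if (currentFile != null) {
        Files.deleteIfExists(currentFile.toPath());
    }
} else if (uploadedFile != null) {
    if (currentFile != null) {
        Files.deleteIfExists(currentFile.toPath());
    }
    Files.createDirectories(logoDir); // throws on failure, unlike mkdirs()
    Files.copy(uploadedFile.toPath(),
            logoDir.resolve(editedDv.getDataverseTheme().getLogo()),
            StandardCopyOption.REPLACE_EXISTING);
}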
Use of edu.harvard.iq.dataverse.engine.command.exception.CommandException in project dataverse by IQSS.
The class EditDatafilesPage, method save.
public String save() {
    if (!saveEnabled) {
        return "";
    }
    if (isFileReplaceOperation()) {
        try {
            return saveReplacementFile();
        } catch (FileReplaceException ex) {
            String errMsg = ex.getMessage();
            FacesContext.getCurrentInstance().addMessage(null,
                    new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", errMsg));
            logger.log(Level.SEVERE, "Dataset save failed for replace operation: {0}", errMsg);
            return null;
        }
    }
    // Save the NEW files permanently:
    ingestService.addFiles(workingVersion, newFiles);
    if (workingVersion.getId() == null || datasetUpdateRequired) {
        logger.fine("issuing the dataset update command");
        if (datasetUpdateRequired) {
            // Sync the edited file metadata back into the working version, matching on storage identifier
            for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) {
                for (FileMetadata fileMetadata : fileMetadatas) {
                    if (fileMetadata.getDataFile().getStorageIdentifier() != null) {
                        if (fileMetadata.getDataFile().getStorageIdentifier()
                                .equals(workingVersion.getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) {
                            workingVersion.getFileMetadatas().set(i, fileMetadata);
                        }
                    }
                }
            }
            if (tabularDataTagsUpdated) {
                // Same sync for the dataset's file list, so updated tabular data tags are not lost
                for (int i = 0; i < dataset.getFiles().size(); i++) {
                    for (FileMetadata fileMetadata : fileMetadatas) {
                        if (fileMetadata.getDataFile().getStorageIdentifier() != null) {
                            if (fileMetadata.getDataFile().getStorageIdentifier()
                                    .equals(dataset.getFiles().get(i).getStorageIdentifier())) {
                                dataset.getFiles().set(i, fileMetadata.getDataFile());
                            }
                        }
                    }
                }
                tabularDataTagsUpdated = false;
            }
        }
        Command<Dataset> cmd;
        try {
            cmd = new UpdateDatasetCommand(dataset, dvRequestService.getDataverseRequest(), filesToBeDeleted);
            ((UpdateDatasetCommand) cmd).setValidateLenient(true);
            dataset = commandEngine.submit(cmd);
        } catch (EJBException ex) {
            StringBuilder error = new StringBuilder();
            error.append(ex).append(" ");
            error.append(ex.getMessage()).append(" ");
            Throwable cause = ex;
            while (cause.getCause() != null) {
                cause = cause.getCause();
                error.append(cause).append(" ");
                error.append(cause.getMessage()).append(" ");
            }
            logger.log(Level.INFO, "Couldn''t save dataset: {0}", error.toString());
            populateDatasetUpdateFailureMessage();
            return null;
        } catch (CommandException ex) {
            FacesContext.getCurrentInstance().addMessage(null,
                    new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString()));
            logger.severe(ex.getMessage());
            populateDatasetUpdateFailureMessage();
            return null;
        }
        datasetUpdateRequired = false;
        saveEnabled = false;
    } else {
        // This is an existing Draft version (and nobody has explicitly
        // requested that the entire dataset is updated). So we'll try to update
        // only the filemetadatas and/or files affected, and not the
        // entire version.
        // TODO: in 4.3, create SaveDataFileCommand!
        // -- L.A. Sep. 21 2015, 4.2
        Timestamp updateTime = new Timestamp(new Date().getTime());
        workingVersion.setLastUpdateTime(updateTime);
        dataset.setModificationTime(updateTime);
        StringBuilder saveError = new StringBuilder();
        for (FileMetadata fileMetadata : fileMetadatas) {
            if (fileMetadata.getDataFile().getCreateDate() == null) {
                fileMetadata.getDataFile().setCreateDate(updateTime);
                fileMetadata.getDataFile().setCreator((AuthenticatedUser) session.getUser());
            }
            fileMetadata.getDataFile().setModificationTime(updateTime);
            try {
                // DataFile savedDatafile = datafileService.save(fileMetadata.getDataFile());
                fileMetadata = datafileService.mergeFileMetadata(fileMetadata);
                logger.fine("Successfully saved DataFile " + fileMetadata.getLabel() + " in the database.");
            } catch (EJBException ex) {
                saveError.append(ex).append(" ");
                saveError.append(ex.getMessage()).append(" ");
                Throwable cause = ex;
                while (cause.getCause() != null) {
                    cause = cause.getCause();
                    saveError.append(cause).append(" ");
                    saveError.append(cause.getMessage()).append(" ");
                }
            }
        }
        // Remove / delete any files that were removed
        for (FileMetadata fmd : filesToBeDeleted) {
            // check if this file is being used as the default thumbnail
            if (fmd.getDataFile().equals(dataset.getThumbnailFile())) {
                logger.fine("deleting the dataset thumbnail designation");
                dataset.setThumbnailFile(null);
            }
            if (!fmd.getDataFile().isReleased()) {
                // if the file is a draft (i.e. new to this version), delete it; otherwise just remove the filemetadata object
                try {
                    commandEngine.submit(new DeleteDataFileCommand(fmd.getDataFile(), dvRequestService.getDataverseRequest()));
                    dataset.getFiles().remove(fmd.getDataFile());
                    workingVersion.getFileMetadatas().remove(fmd);
                    // todo: clean this up some when we clean the create / update dataset methods
                    for (DataFileCategory cat : dataset.getCategories()) {
                        cat.getFileMetadatas().remove(fmd);
                    }
                } catch (CommandException cmde) {
                    // TODO:
                    // add diagnostics reporting for individual data files that
                    // we failed to delete.
                }
            } else {
                datafileService.removeFileMetadata(fmd);
                fmd.getDataFile().getFileMetadatas().remove(fmd);
                workingVersion.getFileMetadatas().remove(fmd);
            }
        }
        String saveErrorString = saveError.toString();
        if (saveErrorString != null && !saveErrorString.isEmpty()) {
            logger.log(Level.INFO, "Couldn''t save dataset: {0}", saveErrorString);
            populateDatasetUpdateFailureMessage();
            return null;
        }
        // in UPLOAD mode, refresh the dataset instance from the database
        // (checking the id for null, just in case)
        if (mode == FileEditMode.UPLOAD) {
            if (dataset.getId() != null) {
                dataset = datasetService.find(dataset.getId());
            }
        }
    }
    newFiles.clear();
    workingVersion = dataset.getEditVersion();
    logger.fine("working version id: " + workingVersion.getId());
    if (mode == FileEditMode.SINGLE) {
        JsfHelper.addSuccessMessage(getBundleString("file.message.editSuccess"));
    } else {
        JsfHelper.addSuccessMessage(getBundleString("dataset.message.filesSuccess"));
    }
    // queue the data ingest jobs for asynchronous execution:
    if (mode == FileEditMode.UPLOAD) {
        ingestService.startIngestJobs(dataset, (AuthenticatedUser) session.getUser());
    }
    if (mode == FileEditMode.SINGLE && fileMetadatas.size() > 0) {
        // If this was a "single file edit", i.e. an edit request sent from
        // the individual File Landing page, we want to redirect back to
        // the landing page. BUT ONLY if the file still exists - i.e., if
        // the user hasn't just deleted it!
        versionString = "DRAFT";
        return returnToFileLandingPage();
    }
    // if (newDraftVersion) {
    //     return returnToDraftVersionById();
    // }
    logger.fine("Redirecting to the dataset page, from the edit/upload page.");
    return returnToDraftVersion();
}
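The EJBException handlers in this method walk the cause chain twice with the same inline loop. A small hypothetical helper (not part of EditDatafilesPage) could collect the chain once and keep both catch blocks short:

// Hypothetical helper, not in the Dataverse source: flatten an exception and its
// cause chain into one diagnostic string, as the two inline loops above do.
private String collectCauseMessages(Throwable ex) {
    StringBuilder sb = new StringBuilder();
    for (Throwable t = ex; t != null; t = t.getCause()) {
        sb.append(t).append(" ");
        if (t.getMessage() != null) {
            sb.append(t.getMessage()).append(" ");
        }
    }
    return sb.toString();
}

With that in place, each catch block could reduce to a single call such as logger.log(Level.INFO, "Couldn''t save dataset: {0}", collectCauseMessages(ex)).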
Use of edu.harvard.iq.dataverse.engine.command.exception.CommandException in project dataverse by IQSS.
The class EditDatafilesPage, method deleteDatasetLogoAndUseThisDataFileAsThumbnailInstead.
public void deleteDatasetLogoAndUseThisDataFileAsThumbnailInstead() {
    logger.log(Level.FINE,
            "For dataset id {0} the current thumbnail is from a dataset logo rather than a dataset file, blowing away the logo and using this FileMetadata id instead: {1}",
            new Object[] { dataset.getId(), fileMetadataSelectedForThumbnailPopup });
    /**
     * @todo Rather than deleting and merging right away, try to respect how
     * this page seems to stage actions, giving the user a chance to
     * review before clicking "Save Changes".
     */
    try {
        DatasetThumbnail datasetThumbnail = commandEngine.submit(new UpdateDatasetThumbnailCommand(
                dvRequestService.getDataverseRequest(), dataset,
                UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail,
                fileMetadataSelectedForThumbnailPopup.getDataFile().getId(), null));
        // look up the dataset again because the UpdateDatasetThumbnailCommand mutates (merges) the dataset
        dataset = datasetService.find(dataset.getId());
    } catch (CommandException ex) {
        String error = "Problem setting thumbnail for dataset id " + dataset.getId() + ": " + ex;
        // TODO: show this error to the user?
        logger.info(error);
    }
}
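The catch block above only logs the failure and leaves the "show this error to the user?" question open. One hedged answer, sketched here rather than taken from the Dataverse source, is to reuse the FacesMessage pattern this page already uses elsewhere; the summary text is illustrative, not a bundle key:

// Sketch of an extended catch clause (hypothetical, not the project's code):
catch (CommandException ex) {
    String error = "Problem setting thumbnail for dataset id " + dataset.getId() + ": " + ex;
    logger.info(error);
    // Illustrative addition: surface the failure to the user as well as the log.
    FacesContext.getCurrentInstance().addMessage(null,
            new FacesMessage(FacesMessage.SEVERITY_ERROR, "Thumbnail Update Failed", error));
}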
Use of edu.harvard.iq.dataverse.engine.command.exception.CommandException in project dataverse by IQSS.
The class DataversePage, method save.
public String save() {
    List<DataverseFieldTypeInputLevel> listDFTIL = new ArrayList<>();
    if (editMode != null && editMode.equals(EditMode.INFO)) {
        List<MetadataBlock> selectedBlocks = new ArrayList<>();
        if (dataverse.isMetadataBlockRoot()) {
            dataverse.getMetadataBlocks().clear();
        }
        for (MetadataBlock mdb : this.allMetadataBlocks) {
            if (dataverse.isMetadataBlockRoot() && (mdb.isSelected() || mdb.isRequired())) {
                selectedBlocks.add(mdb);
                for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
                    if (dsft.isRequiredDV() && !dsft.isRequired()
                            && ((!dsft.isHasParent() && dsft.isInclude())
                                || (dsft.isHasParent() && dsft.getParentDatasetFieldType().isInclude()))) {
                        DataverseFieldTypeInputLevel dftil = new DataverseFieldTypeInputLevel();
                        dftil.setDatasetFieldType(dsft);
                        dftil.setDataverse(dataverse);
                        dftil.setRequired(true);
                        dftil.setInclude(true);
                        listDFTIL.add(dftil);
                    }
                    if ((!dsft.isHasParent() && !dsft.isInclude())
                            || (dsft.isHasParent() && !dsft.getParentDatasetFieldType().isInclude())) {
                        DataverseFieldTypeInputLevel dftil = new DataverseFieldTypeInputLevel();
                        dftil.setDatasetFieldType(dsft);
                        dftil.setDataverse(dataverse);
                        dftil.setRequired(false);
                        dftil.setInclude(false);
                        listDFTIL.add(dftil);
                    }
                }
            }
        }
        if (!selectedBlocks.isEmpty()) {
            dataverse.setMetadataBlocks(selectedBlocks);
        }
        if (!dataverse.isFacetRoot()) {
            facets.getTarget().clear();
        }
    }
    Command<Dataverse> cmd = null;
    // TODO change to Create - for now the page is expecting INFO instead.
    Boolean create;
    if (dataverse.getId() == null) {
        if (session.getUser().isAuthenticated()) {
            dataverse.setOwner(ownerId != null ? dataverseService.find(ownerId) : null);
            create = Boolean.TRUE;
            cmd = new CreateDataverseCommand(dataverse, dvRequestService.getDataverseRequest(), facets.getTarget(), listDFTIL);
        } else {
            JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataverse.create.authenticatedUsersOnly"));
            return null;
        }
    } else {
        create = Boolean.FALSE;
        if (editMode != null && editMode.equals(EditMode.FEATURED)) {
            cmd = new UpdateDataverseCommand(dataverse, null, featuredDataverses.getTarget(), dvRequestService.getDataverseRequest(), null);
        } else {
            cmd = new UpdateDataverseCommand(dataverse, facets.getTarget(), null, dvRequestService.getDataverseRequest(), listDFTIL);
        }
    }
    try {
        dataverse = commandEngine.submit(cmd);
        if (session.getUser() instanceof AuthenticatedUser) {
            if (create) {
                userNotificationService.sendNotification((AuthenticatedUser) session.getUser(), dataverse.getCreateDate(), Type.CREATEDV, dataverse.getId());
            }
        }
        String message;
        if (editMode != null && editMode.equals(EditMode.FEATURED)) {
            message = "The featured dataverses for this dataverse have been updated.";
        } else {
            message = (create)
                    ? BundleUtil.getStringFromBundle("dataverse.create.success", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), systemConfig.getGuidesVersion()))
                    : BundleUtil.getStringFromBundle("dataverse.update.success");
        }
        JsfHelper.addSuccessMessage(message);
        editMode = null;
        return returnRedirect();
    } catch (CommandException ex) {
        logger.log(Level.SEVERE, "Unexpected Exception calling dataverse command", ex);
        String errMsg = create ? BundleUtil.getStringFromBundle("dataverse.create.failure") : BundleUtil.getStringFromBundle("dataverse.update.failure");
        JH.addMessage(FacesMessage.SEVERITY_FATAL, errMsg);
        return null;
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Unexpected Exception calling dataverse command", e);
        String errMsg = create ? BundleUtil.getStringFromBundle("dataverse.create.failure") : BundleUtil.getStringFromBundle("dataverse.update.failure");
        JH.addMessage(FacesMessage.SEVERITY_FATAL, errMsg);
        return null;
    }
}
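Both catch branches above log the same message and resolve the same pair of bundle keys. A hypothetical helper (not part of DataversePage) that centralizes the submit-and-report pattern could look like the sketch below, with the caller treating a null return as a failed save:

// Hypothetical helper, not in the Dataverse source: submit the dataverse command
// and translate any failure into the matching bundle message, returning null on error.
private Dataverse submitOrReportFailure(Command<Dataverse> cmd, boolean create) {
    try {
        return commandEngine.submit(cmd);
    } catch (CommandException | RuntimeException ex) {
        logger.log(Level.SEVERE, "Unexpected Exception calling dataverse command", ex);
        String errMsg = create
                ? BundleUtil.getStringFromBundle("dataverse.create.failure")
                : BundleUtil.getStringFromBundle("dataverse.update.failure");
        JH.addMessage(FacesMessage.SEVERITY_FATAL, errMsg);
        return null;
    }
}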