Use of edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException in project dataverse by IQSS.
The class UpdateDatasetVersionCommand, method execute:
@Override
public DatasetVersion execute(CommandContext ctxt) throws CommandException {
    Dataset ds = newVersion.getDataset();
    ctxt.permissions().checkEditDatasetLock(ds, getRequest(), this);
    DatasetVersion latest = ds.getLatestVersion();
    if (latest == null) {
        throw new IllegalCommandException("Dataset " + ds.getId() + " does not have a latest version.", this);
    }
    if (!latest.isDraft()) {
        throw new IllegalCommandException("Cannot update a dataset version that's not a draft", this);
    }
    DatasetVersion edit = ds.getEditVersion();
    edit.setDatasetFields(newVersion.getDatasetFields());
    edit.setDatasetFields(edit.initDatasetFields());
    Set<ConstraintViolation> constraintViolations = edit.validate();
    if (!constraintViolations.isEmpty()) {
        String validationFailedString = "Validation failed:";
        for (ConstraintViolation constraintViolation : constraintViolations) {
            validationFailedString += " " + constraintViolation.getMessage();
        }
        throw new IllegalCommandException(validationFailedString, this);
    }
    Iterator<DatasetField> dsfIt = edit.getDatasetFields().iterator();
    while (dsfIt.hasNext()) {
        if (dsfIt.next().removeBlankDatasetFieldValues()) {
            dsfIt.remove();
        }
    }
    Timestamp now = new Timestamp(new Date().getTime());
    edit.setLastUpdateTime(now);
    ds.setModificationTime(now);
    DatasetVersion managed = ctxt.em().merge(edit);
    boolean doNormalSolrDocCleanUp = true;
    ctxt.index().indexDataset(managed.getDataset(), doNormalSolrDocCleanUp);
    return managed;
}
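For context, commands like the one above are normally run through the Dataverse command engine rather than invoked directly, and IllegalCommandException is a subclass of the checked CommandException declared by execute. The caller-side sketch below is an assumption, not code from the project: the commandEngine, req, and logger names are placeholders, and the UpdateDatasetVersionCommand constructor arguments are illustrative only.
try {
    // submit() re-throws the CommandException hierarchy, so the draft/validation
    // failures thrown above can be handled separately from other command failures
    DatasetVersion updated = commandEngine.submit(new UpdateDatasetVersionCommand(req, newVersion));
} catch (IllegalCommandException ice) {
    // e.g. "Cannot update a dataset version that's not a draft" or a validation message
    logger.warning("Update rejected: " + ice.getMessage());
} catch (CommandException ce) {
    logger.severe("Update failed: " + ce.getMessage());
}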
Use of edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException in project dataverse by IQSS.
The class AddRoleAssigneesToExplicitGroupCommand, method execute:
@Override
public ExplicitGroup execute(CommandContext ctxt) throws CommandException {
    List<String> nonexistentRAs = new LinkedList<>();
    for (String rai : roleAssigneeIdentifiers) {
        RoleAssignee ra = null;
        try {
            ra = ctxt.roleAssignees().getRoleAssignee(rai);
        } catch (EJBException iae) {
            if (iae.getCausedByException() instanceof IllegalArgumentException) {
                throw new IllegalCommandException("Bad role assignee name:" + rai, this);
            }
        }
        if (ra == null) {
            nonexistentRAs.add(rai);
        } else {
            try {
                explicitGroup.add(ra);
            } catch (GroupException ex) {
                Logger.getLogger(AddRoleAssigneesToExplicitGroupCommand.class.getName()).log(Level.WARNING, "Error adding role assignee " + rai + " to group" + " " + explicitGroup.getIdentifier(), ex);
                throw new IllegalCommandException("Error adding " + rai + " to group " + explicitGroup.getIdentifier() + ": " + ex.getMessage(), this);
            }
        }
    }
    if (nonexistentRAs.isEmpty()) {
        return ctxt.explicitGroups().persist(explicitGroup);
    } else {
        StringBuilder sb = new StringBuilder();
        for (String s : nonexistentRAs) {
            sb.append(s).append(", ");
        }
        sb.setLength(sb.length() - 2);
        throw new IllegalCommandException("The following role assignees were not found: " + sb.toString(), this);
    }
}
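The try/catch around getRoleAssignee above illustrates a pattern worth noting: the EJB container wraps runtime exceptions thrown inside a bean in an EJBException, so the command unwraps the cause before deciding whether the identifier was malformed. A minimal sketch of that lookup-and-translate step, extracted into a hypothetical helper on the command class (the name resolveOrThrow is an assumption, not part of Dataverse):
private RoleAssignee resolveOrThrow(CommandContext ctxt, String identifier) throws IllegalCommandException {
    try {
        return ctxt.roleAssignees().getRoleAssignee(identifier);
    } catch (EJBException wrapper) {
        // the container wraps the bean's RuntimeException; inspect the real cause
        if (wrapper.getCausedByException() instanceof IllegalArgumentException) {
            throw new IllegalCommandException("Bad role assignee name:" + identifier, this);
        }
        // anything else is unexpected, so rethrow the wrapper untouched
        throw wrapper;
    }
}
A null return from such a helper would then correspond to the "not found" case collected into nonexistentRAs above.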
Use of edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException in project dataverse by IQSS.
The class CreateDatasetCommand, method execute:
@Override
public Dataset execute(CommandContext ctxt) throws CommandException {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-hh.mm.ss");
    IdServiceBean idServiceBean = IdServiceBean.getBean(theDataset.getProtocol(), ctxt);
    if (theDataset.getIdentifier() == null || theDataset.getIdentifier().isEmpty()) {
        theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean));
    }
    if ((importType != ImportType.MIGRATION && importType != ImportType.HARVEST) && !ctxt.datasets().isIdentifierUniqueInDatabase(theDataset.getIdentifier(), theDataset, idServiceBean)) {
        throw new IllegalCommandException(String.format("Dataset with identifier '%s', protocol '%s' and authority '%s' already exists", theDataset.getIdentifier(), theDataset.getProtocol(), theDataset.getAuthority()), this);
    }
    // If we are importing with the API, we don't want to create an editable version;
    // we just save the version that is already in theDataset.
    DatasetVersion dsv = importType != null ? theDataset.getLatestVersion() : theDataset.getEditVersion();
    // validate
    // @todo For now we run through an initFields method that creates empty fields for
    // anything without a value, so that required fields can be checked.
    dsv.setDatasetFields(dsv.initDatasetFields());
    Set<ConstraintViolation> constraintViolations = dsv.validate();
    if (!constraintViolations.isEmpty()) {
        String validationFailedString = "Validation failed:";
        for (ConstraintViolation constraintViolation : constraintViolations) {
            validationFailedString += " " + constraintViolation.getMessage();
            validationFailedString += " Invalid value: '" + constraintViolation.getInvalidValue() + "'.";
        }
        throw new IllegalCommandException(validationFailedString, this);
    }
    theDataset.setCreator((AuthenticatedUser) getRequest().getUser());
    theDataset.setCreateDate(new Timestamp(new Date().getTime()));
    Iterator<DatasetField> dsfIt = dsv.getDatasetFields().iterator();
    while (dsfIt.hasNext()) {
        if (dsfIt.next().removeBlankDatasetFieldValues()) {
            dsfIt.remove();
        }
    }
    Iterator<DatasetField> dsfItSort = dsv.getDatasetFields().iterator();
    while (dsfItSort.hasNext()) {
        dsfItSort.next().setValueDisplayOrder();
    }
    Timestamp createDate = new Timestamp(new Date().getTime());
    dsv.setCreateTime(createDate);
    dsv.setLastUpdateTime(createDate);
    theDataset.setModificationTime(createDate);
    for (DataFile dataFile : theDataset.getFiles()) {
        dataFile.setCreator((AuthenticatedUser) getRequest().getUser());
        dataFile.setCreateDate(theDataset.getCreateDate());
    }
    String nonNullDefaultIfKeyNotFound = "";
    String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
    String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
    String doiSeparator = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound);
    String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
    if (theDataset.getProtocol() == null) {
        theDataset.setProtocol(protocol);
    }
    if (theDataset.getAuthority() == null) {
        theDataset.setAuthority(authority);
    }
    if (theDataset.getDoiSeparator() == null) {
        theDataset.setDoiSeparator(doiSeparator);
    }
    if (theDataset.getStorageIdentifier() == null) {
        try {
            DataAccess.createNewStorageIO(theDataset, "placeholder");
        } catch (IOException ioex) {
            // If setting the storage identifier through createNewStorageIO fails, dataset creation
            // does not have to fail. We just set the storage id to a default. -SF
            String storageDriver = (System.getProperty("dataverse.files.storage-driver-id") != null) ? System.getProperty("dataverse.files.storage-driver-id") : "file";
            theDataset.setStorageIdentifier(storageDriver + "://" + theDataset.getAuthority() + theDataset.getDoiSeparator() + theDataset.getIdentifier());
            logger.info("Failed to create StorageIO. StorageIdentifier set to default. Not fatal." + "(" + ioex.getMessage() + ")");
        }
    }
    if (theDataset.getIdentifier() == null) {
        /*
            If this command is being executed to save a new dataset initialized
            by the Dataset page (in CREATE mode), it already has the persistent
            identifier.
            Same with a new harvested dataset - the imported metadata record
            must have contained a global identifier, for the harvester to be
            trying to save it permanently in the database.
            In some other cases, such as when a new dataset is created
            via the API, the identifier will need to be generated here.
            -- L.A. 4.6.2
        */
        theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean));
    }
    logger.fine("Saving the files permanently.");
    ctxt.ingest().addFiles(dsv, theDataset.getFiles());
    logger.log(Level.FINE, "doiProvider={0} protocol={1} importType={2} GlobalIdCreateTime=={3}", new Object[] { doiProvider, protocol, importType, theDataset.getGlobalIdCreateTime() });
    // Attempt the registration if the dataset is being created through the API or the app (but not harvest or migration)
    if ((importType == null || importType.equals(ImportType.NEW)) && theDataset.getGlobalIdCreateTime() == null) {
        String doiRetString = "";
        idServiceBean = IdServiceBean.getBean(ctxt);
        try {
            logger.log(Level.FINE, "creating identifier");
            doiRetString = idServiceBean.createIdentifier(theDataset);
        } catch (Throwable e) {
            logger.log(Level.WARNING, "Exception while creating Identifier: " + e.getMessage(), e);
        }
        // Check return value to make sure registration succeeded
        if (!idServiceBean.registerWhenPublished() && doiRetString.contains(theDataset.getIdentifier())) {
            theDataset.setGlobalIdCreateTime(createDate);
        }
    } else if (theDataset.getLatestVersion().getVersionState().equals(VersionState.RELEASED)) {
        // a harvested/migrated dataset that is already released: no registration is attempted,
        // so set the globalIdCreateTime to now
        theDataset.setGlobalIdCreateTime(new Date());
    }
    if (registrationRequired && theDataset.getGlobalIdCreateTime() == null) {
        throw new IllegalCommandException("Dataset could not be created. Registration failed", this);
    }
    logger.log(Level.FINE, "after doi {0}", formatter.format(new Date().getTime()));
    Dataset savedDataset = ctxt.em().merge(theDataset);
    logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime()));
    // set the role to be default contributor role for its dataverse
    if (importType == null || importType.equals(ImportType.NEW)) {
        String privateUrlToken = null;
        ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), savedDataset, privateUrlToken));
    }
    savedDataset.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    savedDataset = ctxt.em().merge(savedDataset);
    if (template != null) {
        ctxt.templates().incrementUsageCount(template.getId());
    }
    logger.fine("Checking if rsync support is enabled.");
    if (DataCaptureModuleUtil.rsyncSupportEnabled(ctxt.settings().getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
        try {
            ScriptRequestResponse scriptRequestResponse = ctxt.engine().submit(new RequestRsyncScriptCommand(getRequest(), savedDataset));
            logger.fine("script: " + scriptRequestResponse.getScript());
        } catch (RuntimeException ex) {
            logger.info("Problem getting rsync script: " + ex.getLocalizedMessage());
        }
    }
    logger.fine("Done with rsync request, if any.");
    try {
        /**
         * @todo Do something with the result. Did it succeed or fail?
         */
        boolean doNormalSolrDocCleanUp = true;
        ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
    } catch (Exception e) {
        logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage());
        /**
         * Even though the original intention appears to have been to allow the
         * dataset to be successfully created even if an exception is thrown during
         * the indexing - in reality, a runtime exception there, even caught,
         * still forces the EJB transaction to be rolled back; hence the
         * dataset is NOT created... but the command completes and exits as if
         * it has been successful.
         * So I am going to throw a Command Exception here, to avoid this.
         * If we DO want to be able to create datasets even if they cannot
         * be immediately indexed, we'll have to figure out how to do that.
         * (Note that import is still possible when Solr is down, because indexDataset()
         * does NOT throw an exception if it is.)
         * -- L.A. 4.5
         */
        throw new CommandException("Dataset could not be created. Indexing failed", this);
    }
    logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime()));
    // if we are not migrating, assign the user to this version
    if (importType == null || importType.equals(ImportType.NEW)) {
        DatasetVersionUser datasetVersionDataverseUser = new DatasetVersionUser();
        String id = getRequest().getUser().getIdentifier();
        id = id.startsWith("@") ? id.substring(1) : id;
        AuthenticatedUser au = ctxt.authentication().getAuthenticatedUser(id);
        datasetVersionDataverseUser.setAuthenticatedUser(au);
        datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
        datasetVersionDataverseUser.setLastUpdateDate(createDate);
        if (savedDataset.getLatestVersion().getId() == null) {
            logger.warning("CreateDatasetCommand: savedDataset version id is null");
        } else {
            datasetVersionDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
        }
        ctxt.em().merge(datasetVersionDataverseUser);
    }
    logger.log(Level.FINE, "after create version user " + formatter.format(new Date().getTime()));
    return savedDataset;
}
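Both UpdateDatasetVersionCommand and CreateDatasetCommand share the same validation idiom: initialize empty dataset fields, run Bean Validation on the version, and fold every ConstraintViolation message into a single IllegalCommandException. A minimal sketch of that shared step as a hypothetical helper inside a command class (the method name validateOrThrow is an assumption, not Dataverse API):
private void validateOrThrow(DatasetVersion dsv) throws IllegalCommandException {
    // create empty fields for anything without a value so "required" constraints can fire
    dsv.setDatasetFields(dsv.initDatasetFields());
    Set<ConstraintViolation> violations = dsv.validate();
    if (!violations.isEmpty()) {
        StringBuilder message = new StringBuilder("Validation failed:");
        for (ConstraintViolation violation : violations) {
            message.append(" ").append(violation.getMessage());
        }
        throw new IllegalCommandException(message.toString(), this);
    }
}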
Use of edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException in project dataverse by IQSS.
The class CreateRoleCommand, method execute:
@Override
public DataverseRole execute(CommandContext ctxt) throws CommandException {
    User user = getUser();
    // todo: temporary for 4.0 - only superusers can create and edit roles
    if ((!(user instanceof AuthenticatedUser) || !user.isSuperuser())) {
        throw new IllegalCommandException("Roles can only be created or edited by superusers.", this);
    }
    dv.addRole(created);
    return ctxt.roles().save(created);
}
Use of edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException in project dataverse by IQSS.
The class DeleteDatasetVersionCommand, method executeImpl:
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    ctxt.permissions().checkEditDatasetLock(doomed, getRequest(), this);
    // if you are deleting a dataset that only has 1 draft, we are actually destroying the dataset
    if (doomed.getVersions().size() == 1) {
        ctxt.engine().submit(new DestroyDatasetCommand(doomed, getRequest()));
    } else {
        // we are only deleting a version
        // todo: for now it's only the latest version, and only if it's a draft,
        // but we should add the ability to destroy a specific version
        DatasetVersion doomedVersion = doomed.getLatestVersion();
        if (doomedVersion.isDraft()) {
            Long versionId = doomedVersion.getId();
            // files
            Iterator<FileMetadata> fmIt = doomedVersion.getFileMetadatas().iterator();
            while (fmIt.hasNext()) {
                FileMetadata fmd = fmIt.next();
                if (!fmd.getDataFile().isReleased()) {
                    // if the file is a draft (i.e. new to this version), delete it
                    // and remove its fileMetadata from the list (so that it won't try to merge)
                    ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest()));
                    fmIt.remove();
                }
            }
            DatasetVersion doomedAndMerged = ctxt.em().merge(doomedVersion);
            ctxt.em().remove(doomedAndMerged);
            // remove version from ds obj before indexing....
            Iterator<DatasetVersion> dvIt = doomed.getVersions().iterator();
            while (dvIt.hasNext()) {
                DatasetVersion dv = dvIt.next();
                if (versionId.equals(dv.getId())) {
                    dvIt.remove();
                }
            }
            /**
             * The DeleteDatasetDraft permission required by this command,
             * DeleteDatasetVersionCommand, is not sufficient for running
             * GetPrivateUrlCommand or DeletePrivateUrlCommand, both of
             * which require ManageDatasetPermissions (because
             * DeletePrivateUrlCommand calls RevokeRoleCommand, which
             * requires ManageDatasetPermissions when executed on a dataset),
             * so we make direct calls to the service bean so that a lowly
             * Contributor who does NOT have ManageDatasetPermissions can
             * still successfully delete a Private URL.
             */
            PrivateUrl privateUrl = ctxt.privateUrl().getPrivateUrlFromDatasetId(doomed.getId());
            if (privateUrl != null) {
                logger.fine("Deleting Private URL for dataset id " + doomed.getId());
                PrivateUrlUser privateUrlUser = new PrivateUrlUser(doomed.getId());
                List<RoleAssignment> roleAssignments = ctxt.roles().directRoleAssignments(privateUrlUser, doomed);
                for (RoleAssignment roleAssignment : roleAssignments) {
                    ctxt.roles().revoke(roleAssignment);
                }
            }
            boolean doNormalSolrDocCleanUp = true;
            ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp);
            return;
        }
        throw new IllegalCommandException("Cannot delete a released version", this);
    }
}