Usage example of edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser in the IQSS/dataverse project: the executeImpl method of the RevokeAllRolesCommand class.
/**
 * Revokes every role assignment and explicit-group membership held by
 * {@code assignee}. Superuser-only: the caller must be an
 * {@link AuthenticatedUser} with the superuser flag set.
 *
 * @param ctxt command context giving access to the roles and explicit-groups services
 * @throws PermissionException if the caller is not an authenticated superuser
 * @throws CommandException    if revoking roles or group memberships fails
 */
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
        // BUG FIX: message previously said "Revoke Superuser status command",
        // copy-pasted from RevokeSuperuserStatusCommand; corrected to name this command.
        throw new PermissionException("Revoke all roles command can only be called by superusers.", this, null, null);
    }
    try {
        // Remove all direct role assignments first, then explicit group memberships.
        ctxt.roles().revokeAll(assignee);
        ctxt.explicitGroups().revokeAllGroupsForAssignee(assignee);
    } catch (Exception ex) {
        // NOTE(review): CommandException here drops the cause; the (msg, command)
        // constructor signature used elsewhere in this file doesn't carry one.
        throw new CommandException("Failed to revoke role assignments and/or group membership", this);
    }
}
Usage example of edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser in the IQSS/dataverse project: the executeImpl method of the RevokeSuperuserStatusCommand class.
/**
 * Clears the superuser flag on {@code targetUser} and persists the change
 * immediately. Only an authenticated superuser may invoke this command.
 *
 * @param ctxt command context providing the entity manager
 * @throws PermissionException if the caller is not an authenticated superuser
 * @throws CommandException    if persisting the flag change fails
 */
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    final boolean callerIsSuperuser =
            (getUser() instanceof AuthenticatedUser) && getUser().isSuperuser();
    if (!callerIsSuperuser) {
        throw new PermissionException("Revoke Superuser status command can only be called by superusers.", this, null, null);
    }
    try {
        // Drop the flag, merge the detached entity, and flush so the change
        // is written to the database right away.
        targetUser.setSuperuser(false);
        ctxt.em().merge(targetUser);
        ctxt.em().flush();
    } catch (Exception ex) {
        throw new CommandException("Failed to revoke the superuser status for user " + targetUser.getIdentifier(), this);
    }
}
Usage example of edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser in the IQSS/dataverse project: the save method of the SubmitDatasetForReviewCommand class.
/**
 * Persists the dataset being submitted for review: stamps the edit version and
 * the dataset with the current time, merges and flushes, records (or creates)
 * the per-version user record for the submitting user, notifies every user who
 * can publish this dataset, and kicks off reindexing.
 *
 * @param ctxt command context providing the entity manager and service beans
 * @return the merged (managed) dataset instance
 * @throws CommandException propagated from the persistence/indexing layer
 */
public Dataset save(CommandContext ctxt) throws CommandException {
// Single timestamp reused so version, dataset, and user records all agree.
Timestamp updateTime = new Timestamp(new Date().getTime());
theDataset.getEditVersion().setLastUpdateTime(updateTime);
theDataset.setModificationTime(updateTime);
Dataset savedDataset = ctxt.em().merge(theDataset);
ctxt.em().flush();
// Track which user last touched this dataset version.
DatasetVersionUser ddu = ctxt.datasets().getDatasetVersionUser(theDataset.getLatestVersion(), this.getUser());
if (ddu != null) {
ddu.setLastUpdateDate(updateTime);
ctxt.em().merge(ddu);
} else {
// TODO: This logic to update the DatasetVersionUser was copied from UpdateDatasetCommand and also appears in CreateDatasetCommand, PublishDatasetCommand UpdateDatasetCommand, and ReturnDatasetToAuthorCommand. Consider consolidating.
DatasetVersionUser datasetDataverseUser = new DatasetVersionUser();
datasetDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
datasetDataverseUser.setLastUpdateDate((Timestamp) updateTime);
// Identifiers are stored with a leading "@"; strip it before the lookup.
String id = getUser().getIdentifier();
id = id.startsWith("@") ? id.substring(1) : id;
AuthenticatedUser au = ctxt.authentication().getAuthenticatedUser(id);
datasetDataverseUser.setAuthenticatedUser(au);
ctxt.em().merge(datasetDataverseUser);
}
// Notify everyone who could act on the submission (i.e. has PublishDataset).
List<AuthenticatedUser> authUsers = ctxt.permissions().getUsersWithPermissionOn(Permission.PublishDataset, savedDataset);
for (AuthenticatedUser au : authUsers) {
ctxt.notifications().sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.SUBMITTEDDS, savedDataset.getLatestVersion().getId());
}
// TODO: What should we do with the indexing result? Print it to the log?
boolean doNormalSolrDocCleanUp = true;
Future<String> indexingResult = ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
return savedDataset;
}
Usage example of edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser in the IQSS/dataverse project: the executeImpl method of the UpdateDatasetTargetURLCommand class.
/**
 * Asks the persistent-identifier service to re-register the target dataset's
 * identifier with an updated target URL. On success the dataset's global-id
 * creation time is refreshed and persisted; on failure nothing is updated, so
 * a stale (or null) creation time is the failure signal. Superuser-only.
 *
 * @param ctxt command context providing the entity manager and id service
 * @throws PermissionException if the caller is not an authenticated superuser
 */
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    boolean superuserCall = (getUser() instanceof AuthenticatedUser) && getUser().isSuperuser();
    if (!superuserCall) {
        throw new PermissionException("Update Target URL can only be called by superusers.", this, Collections.singleton(Permission.EditDataset), target);
    }
    IdServiceBean idServiceBean = IdServiceBean.getBean(target.getProtocol(), ctxt);
    HashMap<String, String> targetMetadata = idServiceBean.getMetadataFromDatasetForTargetURL(target);
    try {
        String serviceReply = idServiceBean.modifyIdentifier(target, targetMetadata);
        // The service echoes the identifier back on success.
        if (serviceReply != null && serviceReply.contains(target.getIdentifier())) {
            target.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
            ctxt.em().merge(target);
            ctxt.em().flush();
        }
        // Otherwise: do nothing — we'll know it failed because the global id
        // create time won't have been updated.
    } catch (Exception ignored) {
        // Deliberate best-effort: the id service has already logged the problem.
    }
}
Usage example of edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser in the IQSS/dataverse project: the save method of the UpdateDatasetCommand class.
/**
 * Persists edits to a dataset: prunes blank field values, stamps modification
 * times, handles file deletions (including thumbnail and UNF bookkeeping),
 * registers a persistent identifier when the installation registers at create
 * time, then merges, flushes, reindexes, and records the editing user.
 *
 * The merge/flush ordering here is deliberate and fragile — see the inline
 * comments — so statements must not be reordered casually.
 *
 * @param ctxt command context providing the entity manager and service beans
 * @return the merged (managed) dataset instance
 * @throws CommandException propagated from nested commands / persistence
 */
public Dataset save(CommandContext ctxt) throws CommandException {
// Drop dataset fields whose values are entirely blank; Iterator.remove is
// required because we mutate the list while iterating.
Iterator<DatasetField> dsfIt = theDataset.getEditVersion().getDatasetFields().iterator();
while (dsfIt.hasNext()) {
if (dsfIt.next().removeBlankDatasetFieldValues()) {
dsfIt.remove();
}
}
// Normalize the display order of the remaining field values.
Iterator<DatasetField> dsfItSort = theDataset.getEditVersion().getDatasetFields().iterator();
while (dsfItSort.hasNext()) {
dsfItSort.next().setValueDisplayOrder();
}
// One timestamp shared by the version, the dataset, and the files.
Timestamp updateTime = new Timestamp(new Date().getTime());
theDataset.getEditVersion().setLastUpdateTime(updateTime);
theDataset.setModificationTime(updateTime);
for (DataFile dataFile : theDataset.getFiles()) {
if (dataFile.getCreateDate() == null) {
// First time this file is saved: stamp creation info with the editing user.
dataFile.setCreateDate(updateTime);
dataFile.setCreator((AuthenticatedUser) getUser());
}
dataFile.setModificationTime(updateTime);
}
// Remove / delete any files that were removed
// If any of the files that we are deleting has a UNF, we will need to
// re-calculate the UNF of the version - since that is the product
// of the UNFs of the individual files.
boolean recalculateUNF = false;
/* The separate loop is just to make sure that the dataset database is
updated, specifically when an image datafile is being deleted, which
is being used as the dataset thumbnail as part of a batch delete.
if we dont remove the thumbnail association with the dataset before the
actual deletion of the file, it might throw foreign key integration
violation exceptions.
*/
for (FileMetadata fmd : filesToDelete) {
// check if this file is being used as the default thumbnail
if (fmd.getDataFile().equals(theDataset.getThumbnailFile())) {
logger.fine("deleting the dataset thumbnail designation");
theDataset.setThumbnailFile(null);
}
if (fmd.getDataFile().getUnf() != null) {
recalculateUNF = true;
}
}
// we have to merge to update the database but not flush because
// we don't want to create two draft versions!
Dataset tempDataset = ctxt.em().merge(theDataset);
for (FileMetadata fmd : filesToDelete) {
if (!fmd.getDataFile().isReleased()) {
// if file is draft (ie. new to this version, delete; otherwise just remove filemetadata object)
ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest()));
tempDataset.getFiles().remove(fmd.getDataFile());
tempDataset.getEditVersion().getFileMetadatas().remove(fmd);
// todo: clean this up some when we clean the create / update dataset methods
for (DataFileCategory cat : tempDataset.getCategories()) {
cat.getFileMetadatas().remove(fmd);
}
} else {
// Released file: remove only this version's metadata record, keep the file.
FileMetadata mergedFmd = ctxt.em().merge(fmd);
ctxt.em().remove(mergedFmd);
fmd.getDataFile().getFileMetadatas().remove(fmd);
tempDataset.getEditVersion().getFileMetadatas().remove(fmd);
}
}
if (recalculateUNF) {
// At least one deleted file contributed to the version UNF; recompute it.
ctxt.ingest().recalculateDatasetVersionUNF(tempDataset.getEditVersion());
}
String nonNullDefaultIfKeyNotFound = "";
String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound);
IdServiceBean idServiceBean = IdServiceBean.getBean(ctxt);
boolean registerWhenPublished = idServiceBean.registerWhenPublished();
logger.log(Level.FINE, "doiProvider={0} protocol={1} GlobalIdCreateTime=={2}", new Object[] { doiProvider, tempDataset.getProtocol(), tempDataset.getGlobalIdCreateTime() });
// Register the persistent id now only if the installation registers at
// create/update time (not at publish) and it hasn't been registered yet.
if (!registerWhenPublished && tempDataset.getGlobalIdCreateTime() == null) {
try {
logger.fine("creating identifier");
String doiRetString = idServiceBean.createIdentifier(tempDataset);
int attempts = 0;
while (!doiRetString.contains(tempDataset.getIdentifier()) && doiRetString.contains("identifier already exists") && attempts < FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT) {
// if the identifier exists, we'll generate another one
// and try to register again... but only up to some
// reasonably high number of times - so that we don't
// go into an infinite loop here, if EZID is giving us
// these duplicate messages in error.
//
// (and we do want the limit to be a "reasonably high" number!
// true, if our identifiers are randomly generated strings,
// then it is highly unlikely that we'll ever run into a
// duplicate race condition repeatedly; but if they are sequential
// numeric values, than it is entirely possible that a large
// enough number of values will be legitimately registered
// by another entity sharing the same authority...)
tempDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(tempDataset, idServiceBean));
doiRetString = idServiceBean.createIdentifier(tempDataset);
attempts++;
}
// we simply give up.
if (doiRetString.contains(tempDataset.getIdentifier())) {
// Success: the service echoed our identifier back.
tempDataset.setGlobalIdCreateTime(new Timestamp(new Date().getTime()));
} else if (doiRetString.contains("identifier already exists")) {
logger.warning("EZID refused registration, requested id(s) already in use; gave up after " + attempts + " attempts. Current (last requested) identifier: " + tempDataset.getIdentifier());
} else {
logger.warning("Failed to create identifier (" + tempDataset.getIdentifier() + ") with EZID: " + doiRetString);
}
} catch (Throwable e) {
// EZID probably down
}
}
Dataset savedDataset = ctxt.em().merge(tempDataset);
ctxt.em().flush();
/**
* @todo What should we do with the indexing result? Print it to the
* log?
*/
boolean doNormalSolrDocCleanUp = true;
Future<String> indexingResult = ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp);
// String indexingResult = "(Indexing Skipped)";
// logger.log(Level.INFO, "during dataset save, indexing result was: {0}", indexingResult);
// Record which user last touched this version (same pattern as
// SubmitDatasetForReviewCommand and friends — candidate for consolidation).
DatasetVersionUser ddu = ctxt.datasets().getDatasetVersionUser(theDataset.getLatestVersion(), this.getUser());
if (ddu != null) {
ddu.setLastUpdateDate(updateTime);
ctxt.em().merge(ddu);
} else {
DatasetVersionUser datasetDataverseUser = new DatasetVersionUser();
datasetDataverseUser.setDatasetVersion(savedDataset.getLatestVersion());
datasetDataverseUser.setLastUpdateDate((Timestamp) updateTime);
// Identifiers are stored with a leading "@"; strip it before the lookup.
String id = getUser().getIdentifier();
id = id.startsWith("@") ? id.substring(1) : id;
AuthenticatedUser au = ctxt.authentication().getAuthenticatedUser(id);
datasetDataverseUser.setAuthenticatedUser(au);
ctxt.em().merge(datasetDataverseUser);
}
return savedDataset;
}
Aggregations