Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class IngestUtilTest, method testCheckForDuplicateFileNamesNoDirectories.
/**
 * Test adding duplicate file name labels to a dataset version with no
 * subdirectories. The first call to checkForDuplicateFileNamesFinal should
 * rename the clashing labels to "datafile1-1.txt" / "datafile2-1.txt"; a
 * second call should increment the suffix to "-2".
 */
@Test
public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
    SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
    // create dataset
    Dataset dataset = makeDataset();
    // create dataset version
    DatasetVersion datasetVersion = dataset.getEditVersion();
    datasetVersion.setCreateTime(dateFmt.parse("20001012"));
    // NOTE(review): self-assignment — presumably a real timestamp was
    // intended here; harmless for this test, but worth confirming.
    datasetVersion.setLastUpdateTime(datasetVersion.getLastUpdateTime());
    datasetVersion.setId(MocksFactory.nextId());
    datasetVersion.setReleaseTime(dateFmt.parse("20010101"));
    datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
    datasetVersion.setMinorVersionNumber(0L);
    datasetVersion.setVersionNumber(1L);
    datasetVersion.setFileMetadatas(new ArrayList<>());
    // create datafiles
    List<DataFile> dataFileList = new ArrayList<>();
    DataFile datafile1 = new DataFile("application/octet-stream");
    datafile1.setStorageIdentifier("datafile1.txt");
    datafile1.setFilesize(200);
    datafile1.setModificationTime(new Timestamp(new Date().getTime()));
    datafile1.setCreateDate(new Timestamp(new Date().getTime()));
    datafile1.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    datafile1.setOwner(dataset);
    datafile1.setIngestDone();
    datafile1.setChecksumType(DataFile.ChecksumType.SHA1);
    datafile1.setChecksumValue("Unknown");
    // set metadata and add version
    FileMetadata fmd1 = new FileMetadata();
    fmd1.setId(1L);
    fmd1.setLabel("datafile1.txt");
    fmd1.setDataFile(datafile1);
    datafile1.getFileMetadatas().add(fmd1);
    datasetVersion.getFileMetadatas().add(fmd1);
    fmd1.setDatasetVersion(datasetVersion);
    dataFileList.add(datafile1);
    DataFile datafile2 = new DataFile("application/octet-stream");
    datafile2.setStorageIdentifier("datafile2.txt");
    datafile2.setFilesize(200);
    datafile2.setModificationTime(new Timestamp(new Date().getTime()));
    datafile2.setCreateDate(new Timestamp(new Date().getTime()));
    datafile2.setPermissionModificationTime(new Timestamp(new Date().getTime()));
    datafile2.setOwner(dataset);
    datafile2.setIngestDone();
    datafile2.setChecksumType(DataFile.ChecksumType.SHA1);
    datafile2.setChecksumValue("Unknown");
    // set metadata and add version
    FileMetadata fmd2 = new FileMetadata();
    fmd2.setId(2L);
    fmd2.setLabel("datafile2.txt");
    fmd2.setDataFile(datafile2);
    datafile2.getFileMetadatas().add(fmd2);
    datasetVersion.getFileMetadatas().add(fmd2);
    fmd2.setDatasetVersion(datasetVersion);
    dataFileList.add(datafile2);
    IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
    boolean file1NameAltered = false;
    boolean file2NameAltered = false;
    for (DataFile df : dataFileList) {
        if (df.getFileMetadata().getLabel().equals("datafile1-1.txt")) {
            file1NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2-1.txt")) {
            file2NameAltered = true;
        }
    }
    // check that both clashing labels were de-duplicated with a "-1" suffix
    // (expected value first per the JUnit assertEquals contract)
    assertEquals(true, file1NameAltered);
    assertEquals(true, file2NameAltered);
    // try to add data files with "-1" duplicates and see if it gets incremented to "-2"
    IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
    // reset the flags — without this, the assertions below would pass
    // vacuously because the flags are still true from the first pass
    file1NameAltered = false;
    file2NameAltered = false;
    for (DataFile df : dataFileList) {
        if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
            file1NameAltered = true;
        }
        if (df.getFileMetadata().getLabel().equals("datafile2-2.txt")) {
            file2NameAltered = true;
        }
    }
    // check that the "-1" labels were incremented to "-2"
    assertEquals(true, file1NameAltered);
    assertEquals(true, file2NameAltered);
}
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class DeleteDatasetVersionCommand, method executeImpl.
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    // Throws if the dataset is locked against edits for the requesting user.
    ctxt.permissions().checkEditDatasetLock(doomed, getRequest(), this);
    // if you are deleting a dataset that only has 1 draft, we are actually destroying the dataset
    if (doomed.getVersions().size() == 1) {
        ctxt.engine().submit(new DestroyDatasetCommand(doomed, getRequest()));
    } else {
        // we are only deleting a version
        // todo: for now, it's only the latest and if it's a draft
        // but we should add the ability to destroy a specific version
        DatasetVersion doomedVersion = doomed.getLatestVersion();
        if (doomedVersion.isDraft()) {
            Long versionId = doomedVersion.getId();
            // files: delete any file that first appeared in this draft
            // (i.e. was never released) and drop its metadata from the list
            // so the merge below does not try to re-persist it
            Iterator<FileMetadata> fmIt = doomedVersion.getFileMetadatas().iterator();
            while (fmIt.hasNext()) {
                FileMetadata fmd = fmIt.next();
                if (!fmd.getDataFile().isReleased()) {
                    // if file is draft (ie. new to this version), delete
                    // and remove fileMetadata from list (so that it won't try to merge)
                    ctxt.engine().submit(new DeleteDataFileCommand(fmd.getDataFile(), getRequest()));
                    fmIt.remove();
                }
            }
            // re-attach the (possibly detached) version to the persistence
            // context, then delete it
            DatasetVersion doomedAndMerged = ctxt.em().merge(doomedVersion);
            ctxt.em().remove(doomedAndMerged);
            // remove version from ds obj before indexing, so the deleted
            // version does not reappear in the search index
            Iterator<DatasetVersion> dvIt = doomed.getVersions().iterator();
            while (dvIt.hasNext()) {
                DatasetVersion dv = dvIt.next();
                if (versionId.equals(dv.getId())) {
                    dvIt.remove();
                }
            }
            /**
             * The DeleteDatasetDraft permission required by this command is
             * not sufficient for running GetPrivateUrlCommand or
             * DeletePrivateUrlCommand: both require ManageDatasetPermissions,
             * because DeletePrivateUrlCommand calls RevokeRoleCommand, which
             * requires ManageDatasetPermissions when executed on a dataset.
             * So we make direct calls to the service beans, so that a lowly
             * Contributor who does NOT have ManageDatasetPermissions can
             * still successfully delete a Private URL.
             */
            PrivateUrl privateUrl = ctxt.privateUrl().getPrivateUrlFromDatasetId(doomed.getId());
            if (privateUrl != null) {
                logger.fine("Deleting Private URL for dataset id " + doomed.getId());
                PrivateUrlUser privateUrlUser = new PrivateUrlUser(doomed.getId());
                List<RoleAssignment> roleAssignments = ctxt.roles().directRoleAssignments(privateUrlUser, doomed);
                for (RoleAssignment roleAssignment : roleAssignments) {
                    ctxt.roles().revoke(roleAssignment);
                }
            }
            boolean doNormalSolrDocCleanUp = true;
            ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp);
            return;
        }
        // the latest version is not a draft: released versions may not be deleted
        throw new IllegalCommandException("Cannot delete a released version", this);
    }
}
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class CreateDatasetVersionCommand, method execute.
/**
 * Creates a new draft version of {@code dataset}, carrying over copies of
 * the latest version's file metadata, and stores it as the new head of the
 * dataset's version list.
 *
 * @return the persisted new version
 * @throws IllegalCommandException if the latest version is already a draft,
 *         or if the new version fails bean validation
 */
@Override
public DatasetVersion execute(CommandContext ctxt) throws CommandException {
    // Refuse to stack a second draft on top of an existing working copy.
    DatasetVersion latest = dataset.getLatestVersion();
    if (latest.isWorkingCopy()) {
        // before throwing an Exception
        if (newVersion.getVersionState().equals(VersionState.DRAFT)) {
            throw new IllegalCommandException("Latest version is already a draft. Cannot add another draft", this);
        }
    }
    newVersion.setDataset(dataset);
    newVersion.setDatasetFields(newVersion.initDatasetFields());
    // Bean-validate the new version and report every violation at once.
    Set<ConstraintViolation> constraintViolations = newVersion.validate();
    if (!constraintViolations.isEmpty()) {
        // StringBuilder instead of += concatenation in the loop
        StringBuilder validationFailed = new StringBuilder("Validation failed:");
        for (ConstraintViolation constraintViolation : constraintViolations) {
            validationFailed.append(" ").append(constraintViolation.getMessage());
        }
        throw new IllegalCommandException(validationFailed.toString(), this);
    }
    // Drop fields whose values are entirely blank...
    Iterator<DatasetField> dsfIt = newVersion.getDatasetFields().iterator();
    while (dsfIt.hasNext()) {
        if (dsfIt.next().removeBlankDatasetFieldValues()) {
            dsfIt.remove();
        }
    }
    // ...and normalize the display order of the remaining values.
    for (DatasetField dsf : newVersion.getDatasetFields()) {
        dsf.setValueDisplayOrder();
    }
    // Carry the latest version's file metadata over as copies attached to
    // the new version, leaving the originals untouched.
    List<FileMetadata> newVersionMetadata = new ArrayList<>(latest.getFileMetadatas().size());
    for (FileMetadata fmd : latest.getFileMetadatas()) {
        FileMetadata fmdCopy = fmd.createCopy();
        fmdCopy.setDatasetVersion(newVersion);
        newVersionMetadata.add(fmdCopy);
    }
    newVersion.setFileMetadatas(newVersionMetadata);
    // Stamp creation/update times on the version and touch the dataset.
    // (A redundant second setDataset(dataset) call was removed here — the
    // owner was already set above and nothing changed it in between.)
    Timestamp now = new Timestamp(new Date().getTime());
    newVersion.setCreateTime(now);
    newVersion.setLastUpdateTime(now);
    dataset.setModificationTime(now);
    // Insert the new version at the head of the dataset's version list,
    // working on a copy in case the current list is unmodifiable.
    List<DatasetVersion> dsvs = new ArrayList<>(dataset.getVersions());
    dsvs.add(0, newVersion);
    dataset.setVersions(dsvs);
    // ctxt.index().indexDataset(dataset);
    return ctxt.datasets().storeVersion(newVersion);
}
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class CreatePrivateUrlCommand, method execute.
/**
 * Creates a Private URL for the dataset by assigning the MEMBER role to a
 * per-dataset pseudo-user, keyed by a freshly generated random token.
 *
 * @return the newly created PrivateUrl
 * @throws IllegalCommandException if the dataset is null, already has a
 *         Private URL, or its latest version is not a draft
 */
@Override
public PrivateUrl execute(CommandContext ctxt) throws CommandException {
    logger.fine("Executing CreatePrivateUrlCommand...");
    if (dataset == null) {
        // @todo Internationalize this.
        String msg = "Can't create Private URL. Dataset is null.";
        logger.info(msg);
        throw new IllegalCommandException(msg, this);
    }
    // At most one Private URL per dataset.
    PrivateUrl alreadyThere = ctxt.privateUrl().getPrivateUrlFromDatasetId(dataset.getId());
    if (alreadyThere != null) {
        // @todo Internationalize this.
        String msg = "Private URL already exists for dataset id " + dataset.getId() + ".";
        logger.info(msg);
        throw new IllegalCommandException(msg, this);
    }
    // Private URLs only make sense for previewing unpublished drafts.
    DatasetVersion latest = dataset.getLatestVersion();
    if (!latest.isDraft()) {
        // @todo Internationalize this.
        String msg = "Can't create Private URL because the latest version of dataset id " + dataset.getId() + " is not a draft.";
        logger.info(msg);
        throw new IllegalCommandException(msg, this);
    }
    // Grant the pseudo-user MEMBER on the dataset, keyed by a random token.
    PrivateUrlUser pseudoUser = new PrivateUrlUser(dataset.getId());
    DataverseRole memberRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.MEMBER);
    String token = UUID.randomUUID().toString();
    RoleAssignment assignment = ctxt.engine().submit(new AssignRoleCommand(pseudoUser, memberRole, dataset, getRequest(), token));
    return new PrivateUrl(assignment, dataset, ctxt.systemConfig().getDataverseSiteUrl());
}
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS: class DeaccessionDatasetVersionCommand, method execute.
/**
 * Marks {@code theVersion} as DEACCESSIONED, persists the change, reindexes
 * the owning dataset, and then refreshes the cached metadata exports (or
 * wipes them when no released version remains).
 *
 * @return the managed (merged) deaccessioned version
 */
@Override
public DatasetVersion execute(CommandContext ctxt) throws CommandException {
    Dataset ds = theVersion.getDataset();
    theVersion.setVersionState(DatasetVersion.VersionState.DEACCESSIONED);
    /* We do not want to delete the identifier if the dataset is completely deaccessioned
    logger.fine("deleteDOIIdentifier=" + deleteDOIIdentifier);
    if (deleteDOIIdentifier) {
        String nonNullDefaultIfKeyNotFound = "";
        String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
        ArrayList<String> currentProtocol = new ArrayList<>();
        currentProtocol.add(protocol);
        IdServiceBean idServiceBean = IdServiceBean.getBean(ctxt);
        logger.fine("protocol=" + protocol);
        try {
            idServiceBean.deleteIdentifier(ds);
        } catch (Exception e) {
            if (e.toString().contains("Internal Server Error")) {
                throw new CommandException(BundleUtil.getStringFromBundle("dataset.publish.error", idServiceBean.getProviderInformation()),this);
            }
            throw new CommandException(BundleUtil.getStringFromBundle("dataset.delete.error", currentProtocol),this);
        }
    }*/
    // Persist the state change and reindex the dataset.
    DatasetVersion merged = ctxt.em().merge(theVersion);
    boolean doNormalSolrDocCleanUp = true;
    ctxt.index().indexDataset(merged.getDataset(), doNormalSolrDocCleanUp);
    ExportService exporter = ExportService.getInstance(ctxt.settings());
    if (merged.getDataset().getReleasedVersion() == null) {
        try {
            // No released version remains — wipe any exports we may have cached.
            exporter.clearAllCachedFormats(merged.getDataset());
        } catch (IOException ignored) {
            // Best-effort cache cleanup; a failure here is deliberately non-fatal.
        }
    } else {
        try {
            exporter.exportAllFormats(merged.getDataset());
        } catch (ExportException ignored) {
            // Export failure is deliberately treated as non-fatal.
        }
    }
    // And save the dataset, to get the "last exported" timestamp right:
    Dataset managedDs = ctxt.em().merge(merged.getDataset());
    return merged;
}
Aggregations