Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS: the execute method of class UpdateDatasetThumbnailCommand.
/**
 * Applies the requested thumbnail change to the dataset and returns the
 * resulting thumbnail (or null when the thumbnail was removed / not set).
 *
 * @param ctxt command context providing the services used here
 * @return the dataset's thumbnail after the change, or null
 * @throws CommandException if the change cannot be applied or verified
 */
@Override
public DatasetThumbnail execute(CommandContext ctxt) throws CommandException {
    if (dataset == null) {
        String message = "Can't update dataset thumbnail. Dataset is null.";
        logger.info(message);
        throw new IllegalCommandException(message, this);
    }
    if (userIntent == null) {
        throw new IllegalCommandException("No changes to save.", this);
    }
    switch (userIntent) {
        case setDatasetFileAsThumbnail:
            if (dataFileIdSupplied == null) {
                throw new CommandException("A file was not selected to be the new dataset thumbnail.", this);
            }
            DataFile datasetFileThumbnailToSwitchTo = ctxt.files().find(dataFileIdSupplied);
            if (datasetFileThumbnailToSwitchTo == null) {
                throw new CommandException("Could not find file based on id supplied: " + dataFileIdSupplied + ".", this);
            }
            Dataset ds1 = ctxt.datasets().setDatasetFileAsThumbnail(dataset, datasetFileThumbnailToSwitchTo);
            DatasetThumbnail datasetThumbnail = ds1.getDatasetThumbnail();
            if (datasetThumbnail == null) {
                throw new CommandException("Dataset thumbnail is unexpectedly absent.", this);
            }
            DataFile dataFile = datasetThumbnail.getDataFile();
            if (dataFile == null) {
                // BUG FIX: the original code fell through into the
                // setNonDatasetFileAsThumbnail case when the new thumbnail had
                // no backing file; fail explicitly instead.
                throw new CommandException("Dataset thumbnail is unexpectedly not based on a dataset file.", this);
            }
            if (!dataFile.getId().equals(dataFileIdSupplied)) {
                // BUG FIX: the two ids were swapped in the original message.
                throw new CommandException("Dataset thumbnail should be based on file id " + dataFileIdSupplied + " but instead it is " + dataFile.getId() + ".", this);
            }
            return datasetThumbnail;
        case setNonDatasetFileAsThumbnail:
            File uploadedFile;
            try {
                uploadedFile = FileUtil.inputStreamToFile(inputStream);
            } catch (IOException ex) {
                throw new CommandException("In setNonDatasetFileAsThumbnail caught exception calling inputStreamToFile: " + ex, this);
            }
            if (uploadedFile == null) {
                throw new CommandException("In setNonDatasetFileAsThumbnail uploadedFile was null.", this);
            }
            long uploadLogoSizeLimit = ctxt.systemConfig().getUploadLogoSizeLimit();
            if (uploadedFile.length() > uploadLogoSizeLimit) {
                throw new IllegalCommandException("File is larger than maximum size: " + uploadLogoSizeLimit + ".", this);
            }
            // try-with-resources so the stream is always closed (the original
            // leaked it), and a missing/unreadable file now fails the command
            // instead of passing a null stream onward after only logging.
            try (FileInputStream fileAsStream = new FileInputStream(uploadedFile)) {
                Dataset datasetWithNewThumbnail = ctxt.datasets().setNonDatasetFileAsThumbnail(dataset, fileAsStream);
                if (datasetWithNewThumbnail != null) {
                    return datasetWithNewThumbnail.getDatasetThumbnail();
                } else {
                    return null;
                }
            } catch (IOException ex) {
                Logger.getLogger(UpdateDatasetThumbnailCommand.class.getName()).log(Level.SEVERE, null, ex);
                throw new CommandException("In setNonDatasetFileAsThumbnail could not read the uploaded file: " + ex, this);
            }
        case removeThumbnail:
            Dataset ds2 = ctxt.datasets().removeDatasetThumbnail(dataset);
            DatasetThumbnail datasetThumbnail2 = ds2.getDatasetThumbnail();
            if (datasetThumbnail2 == null) {
                return null;
            } else {
                throw new CommandException("User wanted to remove the thumbnail but it still has one!", this);
            }
        default:
            throw new IllegalCommandException("Whatever you are trying to do to the dataset thumbnail is not supported.", this);
    }
}
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS: the execute method of class ReturnDatasetToAuthorCommand.
/**
 * Returns a dataset to its authors: the dataset must exist and its latest
 * version must currently be in review. The InReview lock is removed and the
 * dataset is persisted.
 */
@Override
public Dataset execute(CommandContext ctxt) throws CommandException {
    // Guard clauses: the command only makes sense for an existing dataset
    // whose latest version is under review.
    if (theDataset == null) {
        throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.reject.datasetNull"), this);
    }
    if (!theDataset.getLatestVersion().isInReview()) {
        throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.reject.datasetNotInReview"), this);
    }
    // Clear the InReview lock before saving the updated dataset.
    ctxt.engine().submit(new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.InReview));
    return save(ctxt);
}
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS: the execute method of class UpdateDatasetVersionCommand.
/**
 * Updates the draft (latest) version of a dataset with the fields from
 * {@code newVersion}, validates them, persists the edit, and reindexes.
 *
 * @param ctxt command context providing permissions, persistence and indexing
 * @return the managed (merged) draft version
 * @throws CommandException if the dataset is locked, has no latest version,
 *         the latest version is not a draft, or validation fails
 */
@Override
public DatasetVersion execute(CommandContext ctxt) throws CommandException {
    Dataset ds = newVersion.getDataset();
    // Fails the command if the dataset is locked against editing.
    ctxt.permissions().checkEditDatasetLock(ds, getRequest(), this);
    DatasetVersion latest = ds.getLatestVersion();
    if (latest == null) {
        throw new IllegalCommandException("Dataset " + ds.getId() + " does not have a latest version.", this);
    }
    if (!latest.isDraft()) {
        throw new IllegalCommandException("Cannot update a dataset version that's not a draft", this);
    }
    DatasetVersion edit = ds.getEditVersion();
    edit.setDatasetFields(newVersion.getDatasetFields());
    edit.setDatasetFields(edit.initDatasetFields());
    Set<ConstraintViolation> constraintViolations = edit.validate();
    if (!constraintViolations.isEmpty()) {
        // StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder validationFailed = new StringBuilder("Validation failed:");
        for (ConstraintViolation constraintViolation : constraintViolations) {
            validationFailed.append(' ').append(constraintViolation.getMessage());
        }
        throw new IllegalCommandException(validationFailed.toString(), this);
    }
    // Drop fields that become entirely blank after removing empty values;
    // Iterator.remove is required while iterating.
    Iterator<DatasetField> dsfIt = edit.getDatasetFields().iterator();
    while (dsfIt.hasNext()) {
        if (dsfIt.next().removeBlankDatasetFieldValues()) {
            dsfIt.remove();
        }
    }
    // System.currentTimeMillis() avoids allocating a legacy java.util.Date.
    Timestamp now = new Timestamp(System.currentTimeMillis());
    edit.setLastUpdateTime(now);
    ds.setModificationTime(now);
    DatasetVersion managed = ctxt.em().merge(edit);
    boolean doNormalSolrDocCleanUp = true;
    ctxt.index().indexDataset(managed.getDataset(), doNormalSolrDocCleanUp);
    return managed;
}
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS: the exportDataset method of class Datasets.
// TODO:
// This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
// to obtain the dataset that we are trying to export - which would handle
// Auth in the process... For now, Auth isn't necessary - since export ONLY
// WORKS on published datasets, which are open to the world. -- L.A. 4.5
/**
 * Exports a published dataset in the requested format. No auth is required
 * because export only works on published (world-readable) datasets.
 *
 * @param persistentId global id of the dataset to export
 * @param exporter name of the export format (e.g. ddi, schema.org)
 * @return the export as XML or plain text, or an error response
 */
@GET
@Path("/export")
@Produces({ "application/xml", "application/json" })
public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter) {
    try {
        Dataset dataset = datasetService.findByGlobalId(persistentId);
        if (dataset == null) {
            return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
        }
        ExportService instance = ExportService.getInstance(settingsSvc);
        String xml = instance.getExportAsString(dataset, exporter);
        // I'm wondering if this going to become a performance problem
        // with really GIANT datasets,
        // the fact that we are passing these exports, blobs of JSON, and,
        // especially, DDI XML as complete strings. It would be nicer
        // if we could stream instead - and the export service already can
        // give it to us as a stream; then we could start sending the
        // output to the remote client as soon as we got the first bytes,
        // without waiting for the whole thing to be generated and buffered...
        // (the way Access API streams its output).
        // -- L.A., 4.5
        logger.fine("xml to return: " + xml);
        String mediaType = MediaType.TEXT_PLAIN;
        if (instance.isXMLFormat(exporter)) {
            mediaType = MediaType.APPLICATION_XML;
        }
        return allowCors(Response.ok().entity(xml).type(mediaType).build());
    } catch (Exception wr) {
        // BUG FIX: the exception was silently swallowed; log it so export
        // failures are diagnosable from the server log.
        logger.warning("Export failed for persistentId " + persistentId + ", exporter " + exporter + ": " + wr);
        return error(Response.Status.FORBIDDEN, "Export Failed");
    }
}
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS: the getDataset method of class Datasets.
/**
 * Returns the dataset identified by {@code id} as JSON, together with the
 * latest version this request is allowed to see under "latestVersion"
 * (JSON null when no version is accessible). CORS-enabled.
 */
@GET
@Path("{id}")
public Response getDataset(@PathParam("id") String id) {
    return response(req -> {
        // Resolve the dataset, then the newest version visible to this request.
        Dataset found = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
        DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, found));
        JsonObjectBuilder payload = json(found).add("latestVersion", latest == null ? null : json(latest));
        return allowCors(ok(payload));
    });
}
Aggregations