Example usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project (IQSS):
class OAIRecordServiceBean, method updateOaiRecordsForDataset.
// Updates the existing OAI records associated with this dataset.
// Should be called whenever the dataset's release status changes (i.e., when
// it is published or deaccessioned), so that the timestamps on the records
// are refreshed before the next reexport of the corresponding sets.
// *Note* that this method assumes a full metadata reexport has already been
// attempted on the dataset: if getLastExportTime() is null, we treat the
// export as failed and mark the OAI records as "deleted".
@TransactionAttribute(REQUIRES_NEW)
public void updateOaiRecordsForDataset(Dataset dataset) {
    List<OAIRecord> existingRecords = findOaiRecordsByGlobalId(dataset.getGlobalId());
    if (existingRecords == null) {
        logger.fine("Null returned - no records found.");
        return;
    }
    DatasetVersion releasedVersion = dataset.getReleasedVersion();
    if (releasedVersion == null || dataset.getLastExportTime() == null) {
        // No released version (the dataset must have been deaccessioned), or
        // no successful export on record: flag every OAI record as removed.
        markOaiRecordsAsRemoved(existingRecords, new Date(), logger);
        return;
    }
    for (OAIRecord oaiRecord : existingRecords) {
        if (oaiRecord.isRemoved()) {
            // The dataset is available again - "un-delete" the record.
            logger.fine("\"un-deleting\" an existing OAI Record for " + dataset.getGlobalId());
            oaiRecord.setRemoved(false);
            oaiRecord.setLastUpdateTime(new Date());
        } else if (dataset.getLastExportTime().after(oaiRecord.getLastUpdateTime())) {
            // A fresh export happened since the record was last touched.
            oaiRecord.setLastUpdateTime(new Date());
        }
    }
}
Example usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project (IQSS):
class OAIRecordServiceBean, method updateOaiRecords.
// Refreshes (and optionally re-exports) the OAI records for every dataset in
// the named OAI set, then marks any leftover records - datasets that are no
// longer part of the set - as removed.
// Params: setName - the OAI set to update; datasetIds - ids of the datasets
// currently in the set; updateTime - timestamp applied to removed records;
// doExport - whether to attempt a metadata re-export for stale datasets;
// setUpdateLogger - per-run logger for progress/diagnostics.
public void updateOaiRecords(String setName, List<Long> datasetIds, Date updateTime, boolean doExport, Logger setUpdateLogger) {
    // create Map of OaiRecords
    List<OAIRecord> oaiRecords = findOaiRecordsBySetName(setName);
    Map<String, OAIRecord> recordMap = new HashMap<>();
    if (oaiRecords != null) {
        for (OAIRecord record : oaiRecords) {
            // look for duplicates here? delete?
            recordMap.put(record.getGlobalId(), record);
        }
    } else {
        setUpdateLogger.fine("Null returned - no records found.");
    }
    if (!recordMap.isEmpty()) {
        setUpdateLogger.fine("Found " + recordMap.size() + " existing records");
    } else {
        setUpdateLogger.fine("No records in the set yet.");
    }
    if (datasetIds != null) {
        for (Long datasetId : datasetIds) {
            setUpdateLogger.fine("processing dataset id=" + datasetId);
            Dataset dataset = datasetService.find(datasetId);
            if (dataset == null) {
                setUpdateLogger.fine("failed to find dataset!");
            } else {
                setUpdateLogger.fine("found dataset.");
                // TODO: option to *force* export?
                if (doExport) {
                    // TODO:
                    // Review this logic - specifically for handling of
                    // deaccessioned datasets. -- L.A. 4.5
                    // OK, it looks like we can't rely on .getPublicationDate() -
                    // as it is essentially the *first publication* date;
                    // and we are interested in the *last*
                    DatasetVersion releasedVersion = dataset.getReleasedVersion();
                    Date publicationDate = releasedVersion == null ? null : releasedVersion.getReleaseTime();
                    // Re-export only when the release is newer than the last export
                    // (or nothing has ever been exported).
                    if (publicationDate != null && (dataset.getLastExportTime() == null || dataset.getLastExportTime().before(publicationDate))) {
                        setUpdateLogger.fine("Attempting to run export on dataset " + dataset.getGlobalId());
                        exportAllFormats(dataset);
                    }
                }
                setUpdateLogger.fine("\"last exported\" timestamp: " + dataset.getLastExportTime());
                // Refresh the managed entity so we see the lastExportTime value
                // actually persisted by the export above.
                em.refresh(dataset);
                setUpdateLogger.fine("\"last exported\" timestamp, after db refresh: " + dataset.getLastExportTime());
                // NOTE(review): updateOaiRecordForDataset presumably removes the
                // processed entry from recordMap - confirm; otherwise every record
                // would be marked as removed below.
                updateOaiRecordForDataset(dataset, setName, recordMap, setUpdateLogger);
            }
        }
    }
    // anything left in the map should be marked as removed!
    markOaiRecordsAsRemoved(recordMap.values(), updateTime, setUpdateLogger);
}
Example usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project (IQSS):
class RestrictFileCommand, method executeImpl.
// Applies (or lifts) the restricted flag on a single data file, updating both
// the file-level flag and the working draft version's file metadata.
// Throws CommandExecutionException when the installation is public, the file is
// already in the requested state, the metadata update fails, or the working
// version is not a draft.
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    // check if public install & don't allow
    boolean defaultValue = false;
    boolean publicInstall = ctxt.settings().isTrueForKey(SettingsServiceBean.Key.PublicInstall, defaultValue);
    if (publicInstall) {
        throw new CommandExecutionException("Restricting files is not permitted on a public installation.", this);
    }
    if (file.getOwner() == null) {
        // this is a new file through upload, restrict
        file.getFileMetadata().setRestricted(restrict);
        file.setRestricted(restrict);
    } else {
        Dataset dataset = file.getOwner();
        DatasetVersion workingVersion = dataset.getEditVersion();
        // check if this file is already restricted or already unrestricted
        if ((restrict && file.getFileMetadata().isRestricted()) || (!restrict && !file.getFileMetadata().isRestricted())) {
            String text = restrict ? "restricted" : "unrestricted";
            throw new CommandExecutionException("File " + file.getDisplayName() + " is already " + text, this);
        }
        // check if this dataset is a draft (should be), then we can update restrict
        if (workingVersion.isDraft()) {
            // because we must update the working version metadata
            if (dataset.isReleased()) {
                // Released dataset: locate this file's metadata in the draft
                // version and flag it there. NOTE(review): the loop does not
                // break after a match, and unlike the branch below there is no
                // post-update consistency check - confirm both are intentional.
                for (FileMetadata fmw : workingVersion.getFileMetadatas()) {
                    if (file.equals(fmw.getDataFile())) {
                        fmw.setRestricted(restrict);
                        // Only flip the file-level flag while the file itself
                        // is still unreleased.
                        if (!file.isReleased()) {
                            file.setRestricted(restrict);
                        }
                    }
                }
            } else {
                file.getFileMetadata().setRestricted(restrict);
                if (!file.isReleased()) {
                    file.setRestricted(restrict);
                }
                // Sanity check: the metadata flag must now match the request.
                if (file.getFileMetadata().isRestricted() != restrict) {
                    throw new CommandExecutionException("Failed to update the file metadata", this);
                }
            }
        } else {
            throw new CommandExecutionException("Working version must be a draft", this);
        }
    }
}
Example usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project (IQSS):
class DDIExportServiceBean, method export.
/*
 * Workhorse methods, that do all the work:
 */
// Exports a DDI XML document for a variable, datafile, or dataset (released
// version only) onto the supplied OutputStream.
// Params: objectTag - one of OBJECT_TAG_VARIABLE/DATAFILE/DATASET; objectId -
// id of the object to export; os - destination stream (not closed here);
// partialExclude/partialInclude - comma-separated field names to exclude or
// include, or null/empty for no filtering.
// Throws IllegalArgumentException for an unknown tag, an unresolvable id, or an
// unreleased dataset; EJBException if XML streaming fails.
private void export(String objectTag, Long objectId, OutputStream os, String partialExclude, String partialInclude) {
    /*
     * Some checks will need to be here, to see if the corresponding dataset
     * is released, if all the permissions are satisfied, etc., with
     * appropriate exceptions thrown otherwise.
     *
     * something like
     throw new IllegalArgumentException("ExportStudy called with a null study.");
     throw new IllegalArgumentException("Study does not have released version, study.id = " + s.getId());
     */
    Set<String> excludedFieldSet = parseFieldSet(partialExclude);
    Set<String> includedFieldSet = parseFieldSet(partialInclude);
    DatasetVersion releasedVersion = null;
    // Create XML Stream Writer, using the supplied OutputStream:
    XMLStreamWriter xmlw = null;
    // Try to resolve the supplied object id:
    Object dataObject = null;
    if (OBJECT_TAG_VARIABLE.equals(objectTag)) {
        dataObject = variableService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid variable id supplied.");
        }
    } else if (OBJECT_TAG_DATAFILE.equals(objectTag)) {
        dataObject = fileService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid datafile id supplied.");
        }
    } else if (OBJECT_TAG_DATASET.equals(objectTag)) {
        dataObject = datasetService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid dataset id supplied.");
        }
        releasedVersion = ((Dataset) dataObject).getReleasedVersion();
        if (releasedVersion == null) {
            // Only released dataset versions may be exported.
            throw new IllegalArgumentException("Metadata Export: Dataset not released.");
        }
    } else {
        throw new IllegalArgumentException("Metadata Export: Unsupported export requested.");
    }
    try {
        xmlw = xmlOutputFactory.createXMLStreamWriter(os);
        xmlw.writeStartDocument();
        if (OBJECT_TAG_VARIABLE.equals(objectTag)) {
            createVarDDI(xmlw, excludedFieldSet, includedFieldSet, (DataVariable) dataObject);
        } else if (OBJECT_TAG_DATAFILE.equals(objectTag)) {
            createDataFileDDI(xmlw, excludedFieldSet, includedFieldSet, (DataFile) dataObject);
        } else if (OBJECT_TAG_DATASET.equals(objectTag)) {
            createDatasetDDI(xmlw, excludedFieldSet, includedFieldSet, releasedVersion);
        }
        xmlw.writeEndDocument();
    } catch (XMLStreamException ex) {
        Logger.getLogger("global").log(Level.SEVERE, null, ex);
        throw new EJBException("ERROR occurred during partial export of a study.", ex);
    } finally {
        try {
            if (xmlw != null) {
                xmlw.close();
            }
        } catch (XMLStreamException ex) {
            // Closing the writer is best-effort, but don't swallow silently.
            Logger.getLogger("global").log(Level.FINE, "Failed to close XMLStreamWriter", ex);
        }
    }
}

// Parses a comma-separated list of field names into a Set, skipping empty
// tokens. Returns null (meaning "no filtering") when the input is null/empty,
// preserving the original contract of export().
private static Set<String> parseFieldSet(String partial) {
    if (partial == null || "".equals(partial)) {
        return null;
    }
    Set<String> fieldSet = new HashSet<>();
    for (String token : partial.split(",")) {
        if (token != null && !token.equals("")) {
            fieldSet.add(token);
        }
    }
    return fieldSet;
}
Example usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project (IQSS):
class ExportService, method exportAllFormats.
// Runs every available Exporter on the dataset's released version, calling
// cacheExport() on each so the produced output is saved in a file in the
// dataset directory. On full success, bumps the dataset's "last exported"
// timestamp.
// Throws ExportException when there is no released version or when the
// exporter service is misconfigured.
public void exportAllFormats(Dataset dataset) throws ExportException {
    try {
        clearAllCachedFormats(dataset);
    } catch (IOException ex) {
        // Best effort: a failed cache clear should not abort the export.
        Logger.getLogger(ExportService.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        DatasetVersion releasedVersion = dataset.getReleasedVersion();
        if (releasedVersion == null) {
            throw new ExportException("No released version for dataset " + dataset.getGlobalId());
        }
        // Build the dataset DTO as JSON once and feed it to every exporter.
        JsonPrinter jsonPrinter = new JsonPrinter(settingsService);
        JsonObject datasetAsJson = jsonPrinter.jsonAsDatasetDto(releasedVersion).build();
        for (Iterator<Exporter> it = loader.iterator(); it.hasNext();) {
            Exporter exporter = it.next();
            cacheExport(releasedVersion, exporter.getProviderName(), datasetAsJson, exporter);
        }
    } catch (ServiceConfigurationError serviceError) {
        // NOTE(review): the original cause is dropped here (message only) -
        // consider an ExportException(String, Throwable) overload if available.
        throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
    }
    // Finally, if we have been able to successfully export in all available
    // formats, we'll increment the "last exported" time stamp:
    dataset.setLastExportTime(new Timestamp(new Date().getTime()));
}
Aggregations