Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
From the class DeleteTemplateCommand, method execute:
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
    Dataverse merged = ctxt.em().merge(editedDv);
    if (!dvWDefaultTemplate.isEmpty()) {
        for (Dataverse remove : dvWDefaultTemplate) {
            // clear the default-template reference before the template itself is deleted
            remove.setDefaultTemplate(null);
            ctxt.em().merge(remove);
        }
    }
    Template doomedAndMerged = ctxt.em().merge(doomed);
    ctxt.em().remove(doomedAndMerged);
    return merged;
}
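For context, commands in this code base are not called directly; they are submitted to the command engine, which checks the required permissions before invoking execute(). A minimal sketch of submitting this command from a bean, assuming the EjbDataverseEngine is injectable as elsewhere in the project and that the constructor takes the request, the edited dataverse, the doomed template, and the list of dataverses using it as their default (inferred from the fields referenced above; the exact signature may differ):

    @EJB
    EjbDataverseEngine commandEngine;

    public void deleteTemplate(DataverseRequest request, Dataverse dv, Template doomed, List<Dataverse> dvWDefaultTemplate) {
        try {
            // the engine enforces the command's required permissions before execute() runs
            dv = commandEngine.submit(new DeleteTemplateCommand(request, dv, doomed, dvWDefaultTemplate));
        } catch (CommandException ex) {
            // surface the failure to the caller or UI as appropriate
        }
    }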
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
From the class UpdateDataverseGuestbookCommand, method execute:
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
    // remove dataset assignments of disabled guestbooks
    if (!this.guestbook.isEnabled()) {
        ctxt.em().createNativeQuery("Update Dataset set guestbook_id = null WHERE guestbook_id = " + this.guestbook.getId()).executeUpdate();
    }
    ctxt.em().merge(this.guestbook);
    Dataverse result = ctxt.dataverses().save(editedDv);
    return result;
}
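Since the guestbook id is concatenated directly into the native SQL string, a bound parameter is the more idiomatic JPA form; a sketch of the equivalent update (behavior unchanged, as getId() returns a Long and cannot inject SQL):

    // equivalent native update with a positional parameter instead of concatenation
    ctxt.em().createNativeQuery("UPDATE dataset SET guestbook_id = null WHERE guestbook_id = ?1")
        .setParameter(1, this.guestbook.getId())
        .executeUpdate();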
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
From the class UpdateDataverseThemeCommand, method execute:
/**
 * Updates the theme and widget related data for this dataverse, and
 * does the file management needed for theme images.
 *
 * @param ctxt the command context
 * @return the updated dataverse
 * @throws CommandException if the logo file cannot be saved
 */
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
    // Get the current dataverse, so we can delete the current logo file if necessary
    Dataverse currentDv = ctxt.dataverses().find(editedDv.getId());
    File logoFileDir = new File(logoPath.toFile(), editedDv.getId().toString());
    File currentFile = null;
    if (currentDv.getDataverseTheme() != null && currentDv.getDataverseTheme().getLogo() != null) {
        currentFile = new File(logoFileDir, currentDv.getDataverseTheme().getLogo());
    }
    try {
        if (editedDv.getDataverseTheme() == null || editedDv.getDataverseTheme().getLogo() == null) {
            // the edited logo field is empty: delete the current logo file, if one exists
            if (currentFile != null) {
                currentFile.delete();
            }
        } else if (uploadedFile != null) {
            // the edited logo field isn't empty and a file was uploaded: delete the
            // current file and copy the uploaded file from the temp dir to the logos dir
            File newFile = new File(logoFileDir, editedDv.getDataverseTheme().getLogo());
            if (currentFile != null) {
                currentFile.delete();
            }
            logoFileDir.mkdirs();
            Files.copy(uploadedFile.toPath(), newFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
        // save the updated dataverse to the db
        return ctxt.dataverses().save(editedDv);
    } catch (IOException e) {
        // TODO: improve error handling
        throw new CommandException("Error saving logo file", e, this);
    }
}
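A sketch of how this command might be invoked after a logo upload, assuming a constructor that takes the edited dataverse, the uploaded temp file, and the request; the argument order is inferred from the fields used above and may differ from the actual class:

    try {
        // uploadedTempFile is the staged logo in the temp dir, or null if the logo is unchanged
        editedDv = commandEngine.submit(new UpdateDataverseThemeCommand(editedDv, uploadedTempFile, request));
    } catch (CommandException ex) {
        // e.g. report "Error saving logo file" back to the theme editor page
    }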
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
From the class ListDataverseContentCommand, method execute:
@Override
public List<DvObject> execute(CommandContext ctxt) throws CommandException {
    LinkedList<DvObject> result = new LinkedList<>();
    for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) {
        try {
            ds = ctxt.engine().submit(new GetDatasetCommand(getRequest(), ds));
            result.add(ds);
        } catch (PermissionException ex) {
            // the user may not view this dataset; silently skip it
        }
    }
    for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) {
        try {
            dv = ctxt.engine().submit(new GetDataverseCommand(getRequest(), dv));
            result.add(dv);
        } catch (PermissionException ex) {
            // the user may not view this dataverse; silently skip it
        }
    }
    return result;
}
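Wrapping each child in a Get*Command is what makes the listing permission-aware: objects the caller may not view throw PermissionException and are left out of the result. A usage sketch, assuming the constructor takes the request and the dataverse whose contents are listed:

    try {
        List<DvObject> contents = commandEngine.submit(new ListDataverseContentCommand(request, dataverse));
        // contents now holds only the datasets and sub-dataverses visible to the caller
    } catch (CommandException ex) {
        // thrown if the caller may not list this dataverse at all
    }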
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
From the class HarvesterServiceBean, method doHarvest:
/**
 * Runs a harvest for an individual harvesting client.
 *
 * @param dataverseRequest the request on whose behalf the harvest runs
 * @param harvestingClientId the id of the harvesting client configuration
 * @throws IOException if no such client exists, the harvest type is
 *         unsupported, or the harvest logs cannot be written
 */
public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId) throws IOException {
    HarvestingClient harvestingClientConfig = harvestingClientService.find(harvestingClientId);
    if (harvestingClientConfig == null) {
        throw new IOException("No such harvesting client: id=" + harvestingClientId);
    }
    Dataverse harvestingDataverse = harvestingClientConfig.getDataverse();
    MutableBoolean harvestErrorOccurred = new MutableBoolean(false);
    String logTimestamp = logFormatter.format(new Date());
    Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingDataverse.getAlias() + logTimestamp);
    String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log";
    FileHandler fileHandler = new FileHandler(logFileName);
    hdLogger.setUseParentHandlers(false);
    hdLogger.addHandler(fileHandler);
    PrintWriter importCleanupLog = new PrintWriter(new FileWriter("../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));
    List<Long> harvestedDatasetIds = null;
    List<Long> harvestedDatasetIdsThisBatch = new ArrayList<>();
    List<String> failedIdentifiers = new ArrayList<>();
    List<String> deletedIdentifiers = new ArrayList<>();
    Date harvestStartTime = new Date();
    try {
        boolean harvestingNow = harvestingClientConfig.isHarvestingNow();
        if (harvestingNow) {
            harvestErrorOccurred.setValue(true);
            hdLogger.log(Level.SEVERE, "Cannot begin harvesting, Dataverse " + harvestingDataverse.getName() + " is currently being harvested.");
        } else {
            harvestingClientService.resetHarvestInProgress(harvestingClientId);
            harvestingClientService.setHarvestInProgress(harvestingClientId, harvestStartTime);
            if (harvestingClientConfig.isOai()) {
                harvestedDatasetIds = harvestOAI(dataverseRequest, harvestingClientConfig, hdLogger, importCleanupLog, harvestErrorOccurred, failedIdentifiers, deletedIdentifiers, harvestedDatasetIdsThisBatch);
            } else {
                throw new IOException("Unsupported harvest type");
            }
            harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size());
            hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl() + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix());
            hdLogger.log(Level.INFO, "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: " + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size());
            // now index all the datasets we have harvested - created, modified or deleted:
            /* (TODO: may not be needed at all. In Dataverse4, we may be able to get away with the normal
               reindexing after every import. See the rest of the comments about batch indexing throughout
               this service bean.)
            if (this.processedSizeThisBatch > 0) {
                hdLogger.log(Level.INFO, "POST HARVEST, reindexing the remaining studies.");
                if (this.harvestedDatasetIdsThisBatch != null) {
                    hdLogger.log(Level.INFO, this.harvestedDatasetIdsThisBatch.size() + " studies in the batch");
                }
                hdLogger.log(Level.INFO, this.processedSizeThisBatch + " bytes of content");
                indexService.updateIndexList(this.harvestedDatasetIdsThisBatch);
                hdLogger.log(Level.INFO, "POST HARVEST, calls to index finished.");
            } else {
                hdLogger.log(Level.INFO, "(All harvested content already reindexed)");
            }
            */
        }
        // mailService.sendHarvestNotification(...getSystemEmail(), harvestingDataverse.getName(), logFileName, logTimestamp, harvestErrorOccurred.booleanValue(), harvestedDatasetIds.size(), failedIdentifiers);
    } catch (Throwable e) {
        harvestErrorOccurred.setValue(true);
        String message = "Exception processing harvest, server= " + harvestingClientConfig.getHarvestingUrl() + ", format=" + harvestingClientConfig.getMetadataPrefix() + " " + e.getClass().getName() + " " + e.getMessage();
        hdLogger.log(Level.SEVERE, message);
        logException(e, hdLogger);
        hdLogger.log(Level.INFO, "HARVEST NOT COMPLETED DUE TO UNEXPECTED ERROR.");
        // TODO:
        // even though this harvesting run failed, we may have successfully
        // processed some number of datasets by the time the exception was thrown.
        // We should record that number too. And the number of the datasets that
        // had failed, that we may have counted. -- L.A. 4.4
        harvestingClientService.setHarvestFailure(harvestingClientId, new Date());
    } finally {
        harvestingClientService.resetHarvestInProgress(harvestingClientId);
        fileHandler.close();
        hdLogger.removeHandler(fileHandler);
        importCleanupLog.close();
    }
}
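In practice this method is usually driven by scheduled harvesting rather than called directly; a minimal sketch of triggering a run by hand, assuming the service bean is injectable (the logger and the surrounding method names are illustrative):

    @EJB
    HarvesterServiceBean harvesterService;

    public void runHarvest(DataverseRequest request, HarvestingClient client) {
        try {
            harvesterService.doHarvest(request, client.getId());
        } catch (IOException ex) {
            // a missing client, an unsupported harvest type, or unwritable log
            // files all surface here as IOException
            logger.warning("Harvest failed to start for client " + client.getName() + ": " + ex.getMessage());
        }
    }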