use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
the class ListDataverseContentCommand method execute.
@Override
public List<DvObject> execute(CommandContext ctxt) throws CommandException {
    LinkedList<DvObject> result = new LinkedList<>();
    for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) {
        try {
            ds = ctxt.engine().submit(new GetDatasetCommand(getRequest(), ds));
            result.add(ds);
        } catch (PermissionException ex) {
            // Skip datasets the requesting user is not permitted to view.
        }
    }
    for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) {
        try {
            dv = ctxt.engine().submit(new GetDataverseCommand(getRequest(), dv));
            result.add(dv);
        } catch (PermissionException ex) {
            // Skip dataverses the requesting user is not permitted to view.
        }
    }
    return result;
}
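A minimal sketch of how this command might be invoked through the command engine. The constructor signature, and names like commandEngine, dataverseService, and req, are illustrative assumptions, not confirmed API:
// Hypothetical caller: list the contents of a dataverse visible to the requesting user.
// "commandEngine", "dataverseService", and "req" are assumed to be available in context.
Dataverse dv = dataverseService.find(dataverseId); // illustrative lookup
List<DvObject> contents = commandEngine.submit(new ListDataverseContentCommand(req, dv));
for (DvObject obj : contents) {
    logger.fine("visible object id: " + obj.getId());
}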
use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
the class HarvesterServiceBean method processRecord.
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, PrintWriter importCleanupLog, OaiHandler oaiHandler, String identifier, MutableBoolean recordErrorOccurred, MutableLong processedSizeThisBatch, List<String> deletedIdentifiers) {
    String errMessage = null;
    Dataset harvestedDataset = null;
    logGetRecord(hdLogger, oaiHandler, identifier);
    File tempFile = null;
    try {
        FastGetRecord record = oaiHandler.runGetRecord(identifier);
        errMessage = record.getErrorMessage();
        if (errMessage != null) {
            hdLogger.log(Level.SEVERE, "Error calling GetRecord - " + errMessage);
        } else if (record.isDeleted()) {
            hdLogger.info("Deleting harvested dataset for " + identifier + ", per the OAI server's instructions.");
            Dataset dataset = datasetService.getDatasetByHarvestInfo(oaiHandler.getHarvestingClient().getDataverse(), identifier);
            if (dataset != null) {
                hdLogger.info("Deleting dataset " + dataset.getGlobalId());
                deleteHarvestedDataset(dataset, dataverseRequest, hdLogger);
                // TODO: check the status of that delete - see if it actually succeeded
                deletedIdentifiers.add(identifier);
            } else {
                hdLogger.info("No dataset found for " + identifier + ", skipping delete.");
            }
        } else {
            hdLogger.info("Successfully retrieved GetRecord response.");
            tempFile = record.getMetadataFile();
            harvestedDataset = importService.doImportHarvestedDataset(dataverseRequest, oaiHandler.getHarvestingClient(), identifier, oaiHandler.getMetadataPrefix(), record.getMetadataFile(), importCleanupLog);
            hdLogger.fine("Harvest successful for identifier " + identifier);
            hdLogger.fine("Size of this record: " + record.getMetadataFile().length());
            processedSizeThisBatch.add(record.getMetadataFile().length());
        }
    } catch (Throwable e) {
        logGetRecordException(hdLogger, oaiHandler, identifier, e);
        errMessage = "Caught exception while executing GetRecord on " + identifier;
    } finally {
        // Temporary: keep the temp metadata file around if anything went wrong, for now.
        if (tempFile != null && errMessage == null) {
            try {
                tempFile.delete();
            } catch (Throwable t) {
                // Best effort; failing to delete a temp file is not fatal.
            }
        }
    }
    if (errMessage != null) {
        if (recordErrorOccurred != null) {
            recordErrorOccurred.setValue(true);
        } else {
            throw new EJBException(errMessage);
        }
    }
    return harvestedDataset != null ? harvestedDataset.getId() : null;
}
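A hedged sketch of how a harvest run might drive processRecord over a batch of OAI identifiers. The driver names (harvesterService, harvestedIdentifiers) are illustrative; only the processRecord signature comes from the snippet above:
// Hypothetical driver loop over identifiers returned by an OAI ListIdentifiers call;
// "oaiHandler", "dataverseRequest", "hdLogger", and "importCleanupLog" are assumed set up.
MutableBoolean errorOccurred = new MutableBoolean(false);
MutableLong processedSize = new MutableLong(0L);
List<String> deleted = new ArrayList<>();
for (String id : harvestedIdentifiers) { // illustrative list of OAI record identifiers
    Long datasetId = harvesterService.processRecord(dataverseRequest, hdLogger, importCleanupLog,
            oaiHandler, id, errorOccurred, processedSize, deleted);
    if (datasetId != null) {
        hdLogger.fine("Imported dataset id " + datasetId + " for " + id);
    }
}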
use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
the class ExportService method getCachedExportFormat.
// This method checks if the metadata has already been exported in this
// format and cached on disk. If it has, it opens the file and returns
// the file input stream. If not, it returns null.
private InputStream getCachedExportFormat(Dataset dataset, String formatName) throws ExportException, IOException {
    StorageIO<Dataset> dataAccess = null;
    try {
        dataAccess = DataAccess.getStorageIO(dataset);
    } catch (IOException ioex) {
        throw new IOException("IO Exception thrown exporting as " + "export_" + formatName + ".cached");
    }
    try {
        // Open the cached aux file once; a non-null stream means the export is cached.
        InputStream cachedExportInputStream = dataAccess.getAuxFileAsInputStream("export_" + formatName + ".cached");
        if (cachedExportInputStream != null) {
            return cachedExportInputStream;
        }
    } catch (IOException ioex) {
        throw new IOException("IO Exception thrown exporting as " + "export_" + formatName + ".cached");
    }
    return null;
}
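A hedged sketch of how a public accessor might use this helper, falling back to regenerating the export on a cache miss. The getExport and exportFormat names are assumptions for illustration; only getCachedExportFormat is taken from the snippet above:
// Hypothetical caller: serve the cached export if present, otherwise (re)generate it.
public InputStream getExport(Dataset dataset, String formatName) throws ExportException, IOException {
    InputStream cached = getCachedExportFormat(dataset, formatName);
    if (cached != null) {
        return cached;
    }
    exportFormat(dataset, formatName); // illustrative: run the exporter and cache the result
    return getCachedExportFormat(dataset, formatName);
}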
use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
the class ExportService method cacheExport.
// This method runs the selected metadata exporter, caching the output
// in a file in the dataset directory / container based on its DOI:
private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException {
    try {
        // With some storage drivers we can open a WritableChannel, or OutputStream,
        // and write the generated metadata export directly to the cache location.
        // Other drivers (like Swift) do not support that and will throw an
        // "operation not supported" exception. In that case we save the output
        // into a temp file first, and then copy it over to permanent storage
        // with the IO "save" command:
        boolean tempFileRequired = false;
        File tempFile = null;
        OutputStream outputStream = null;
        Dataset dataset = version.getDataset();
        StorageIO<Dataset> storageIO = null;
        try {
            storageIO = DataAccess.createNewStorageIO(dataset, "placeholder");
            Channel outputChannel = storageIO.openAuxChannel("export_" + format + ".cached", DataAccessOption.WRITE_ACCESS);
            outputStream = Channels.newOutputStream((WritableByteChannel) outputChannel);
        } catch (IOException ioex) {
            // Direct writes are not supported by this driver; fall back to a temp file:
            tempFileRequired = true;
            tempFile = File.createTempFile("tempFileToExport", ".tmp");
            outputStream = new FileOutputStream(tempFile);
        }
        try {
            if (!tempFileRequired) {
                // Write the export directly to the auxiliary channel opened above:
                exporter.exportDataset(version, datasetAsJson, outputStream);
                outputStream.flush();
                outputStream.close();
            } else {
                // Write the export to the temp file, then copy that local filesystem
                // Path into this DataAccess auxiliary location:
                exporter.exportDataset(version, datasetAsJson, outputStream);
                outputStream.flush();
                outputStream.close();
                logger.fine("Saving path as aux for temp file in: " + Paths.get(tempFile.getAbsolutePath()));
                storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached");
                boolean tempFileDeleted = tempFile.delete();
                logger.fine("tempFileDeleted: " + tempFileDeleted);
            }
        } catch (IOException ioex) {
            throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
        }
    } catch (IOException ioex) {
        throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
    }
}
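A hedged sketch of how this method might be wired up: pick the exporter for a format, serialize the released version as JSON, and cache the output. The getExporter lookup and jsonAsDatasetDto serialization shown here are assumptions for illustration:
// Hypothetical wiring for a single format; only cacheExport comes from the snippet above.
Exporter exporter = getExporter(format); // assumed lookup of a registered exporter
DatasetVersion released = dataset.getReleasedVersion();
JsonObject datasetAsJson = jsonAsDatasetDto(released).build(); // illustrative JSON serialization
cacheExport(released, format, datasetAsJson, exporter);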
use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
the class ExportService method clearCachedExport.
private void clearCachedExport(Dataset dataset, String format) throws IOException {
    try {
        StorageIO<Dataset> storageIO = getStorageIO(dataset);
        storageIO.deleteAuxObject("export_" + format + ".cached");
    } catch (IOException ex) {
        throw new IOException("IO Exception thrown clearing cached " + "export_" + format + ".cached");
    }
}
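A brief hedged sketch of clearing every cached export for a dataset, e.g. after its metadata changes. The knownFormatNames collection is an assumption; only clearCachedExport comes from the snippet above:
// Hypothetical cleanup after a metadata update: drop all cached export formats.
// "knownFormatNames" is an assumed collection of registered exporter format names.
for (String formatName : knownFormatNames) {
    try {
        clearCachedExport(dataset, formatName);
    } catch (IOException ioex) {
        logger.warning("Failed to clear cached export " + formatName + ": " + ioex.getMessage());
    }
}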