Search in sources:

Example 71 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

the class ListDataverseContentCommand method execute.

/**
 * Lists the contents (datasets and child dataverses) of the dataverse being
 * listed, filtered down to what the requesting user is permitted to view.
 *
 * @param ctxt the command context providing service beans and the engine
 * @return the datasets and dataverses owned by {@code dvToList} that the
 *         requesting user may view
 * @throws CommandException if a submitted sub-command fails for a reason
 *         other than permissions
 */
@Override
public List<DvObject> execute(CommandContext ctxt) throws CommandException {
    LinkedList<DvObject> result = new LinkedList<>();
    for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) {
        try {
            // GetDatasetCommand performs the per-object permission check.
            ds = ctxt.engine().submit(new GetDatasetCommand(getRequest(), ds));
            result.add(ds);
        } catch (PermissionException ignored) {
            // Intentionally swallowed: datasets the user may not view are
            // simply omitted from the listing.
        }
    }
    for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) {
        try {
            // Same permission-filtered retrieval for child dataverses.
            dv = ctxt.engine().submit(new GetDataverseCommand(getRequest(), dv));
            result.add(dv);
        } catch (PermissionException ignored) {
            // Intentionally swallowed: dataverses the user may not view are
            // simply omitted from the listing.
        }
    }
    return result;
}
Also used : PermissionException(edu.harvard.iq.dataverse.engine.command.exception.PermissionException) DvObject(edu.harvard.iq.dataverse.DvObject) Dataset(edu.harvard.iq.dataverse.Dataset) Dataverse(edu.harvard.iq.dataverse.Dataverse) LinkedList(java.util.LinkedList)

Example 72 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

the class HarvesterServiceBean method processRecord.

/**
 * Retrieves a single OAI record by identifier and processes it: deletes the
 * corresponding local harvested dataset if the server marked the record
 * deleted, otherwise imports (creates or updates) the dataset from the
 * retrieved metadata.
 *
 * @param dataverseRequest       the request on whose behalf the harvest runs
 * @param hdLogger               per-harvest logger
 * @param importCleanupLog       log written by the import service
 * @param oaiHandler             configured OAI client for the remote server
 * @param identifier             OAI record identifier to fetch
 * @param recordErrorOccurred    set to {@code true} on failure when non-null;
 *                               when null, failures throw {@link EJBException}
 * @param processedSizeThisBatch accumulator for bytes of metadata processed
 * @param deletedIdentifiers     collects identifiers whose datasets were deleted
 * @return the id of the imported dataset, or {@code null} if nothing was imported
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, PrintWriter importCleanupLog, OaiHandler oaiHandler, String identifier, MutableBoolean recordErrorOccurred, MutableLong processedSizeThisBatch, List<String> deletedIdentifiers) {
    String errMessage = null;
    Dataset harvestedDataset = null;
    logGetRecord(hdLogger, oaiHandler, identifier);
    File tempFile = null;
    try {
        FastGetRecord record = oaiHandler.runGetRecord(identifier);
        errMessage = record.getErrorMessage();
        if (errMessage != null) {
            hdLogger.log(Level.SEVERE, "Error calling GetRecord - " + errMessage);
        } else if (record.isDeleted()) {
            hdLogger.info("Deleting harvesting dataset for " + identifier + ", per the OAI server's instructions.");
            Dataset dataset = datasetService.getDatasetByHarvestInfo(oaiHandler.getHarvestingClient().getDataverse(), identifier);
            if (dataset != null) {
                hdLogger.info("Deleting dataset " + dataset.getGlobalId());
                deleteHarvestedDataset(dataset, dataverseRequest, hdLogger);
                // TODO:
                // check the status of that Delete - see if it actually succeeded
                deletedIdentifiers.add(identifier);
            } else {
                hdLogger.info("No dataset found for " + identifier + ", skipping delete. ");
            }
        } else {
            hdLogger.info("Successfully retrieved GetRecord response.");
            tempFile = record.getMetadataFile();
            harvestedDataset = importService.doImportHarvestedDataset(dataverseRequest, oaiHandler.getHarvestingClient(), identifier, oaiHandler.getMetadataPrefix(), record.getMetadataFile(), importCleanupLog);
            hdLogger.fine("Harvest Successful for identifier " + identifier);
            // Hoisted: query the metadata file size once instead of twice.
            long metadataSize = record.getMetadataFile().length();
            hdLogger.fine("Size of this record: " + metadataSize);
            processedSizeThisBatch.add(metadataSize);
        }
    } catch (Throwable e) {
        logGetRecordException(hdLogger, oaiHandler, identifier, e);
        errMessage = "Caught exception while executing GetRecord on " + identifier;
    // logException(e, hdLogger);
    } finally {
        // temporary - let's not delete the temp metadata file if anything went wrong, for now:
        if (tempFile != null && errMessage == null) {
            // File.delete() signals failure via its return value; it does not
            // throw a checked exception, so the old empty catch was dead code.
            if (!tempFile.delete()) {
                hdLogger.fine("Failed to delete temporary metadata file " + tempFile.getAbsolutePath());
            }
        }
    }
    if (errMessage != null) {
        if (recordErrorOccurred != null) {
            // Caller asked to be notified of failures via the flag.
            recordErrorOccurred.setValue(true);
        } else {
            // No flag supplied: escalate so the container marks the harvest failed.
            throw new EJBException(errMessage);
        }
    }
    return harvestedDataset != null ? harvestedDataset.getId() : null;
}
Also used : Dataset(edu.harvard.iq.dataverse.Dataset) EJBException(javax.ejb.EJBException) DataFile(edu.harvard.iq.dataverse.DataFile) File(java.io.File) PrintWriter(java.io.PrintWriter) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 73 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

the class ExportService method getCachedExportFormat.

// This method checks if the metadata has already been exported in this
// format and cached on disk. If it has, it'll open the file and return
// the file input stream. If not, it'll return null.
private InputStream getCachedExportFormat(Dataset dataset, String formatName) throws ExportException, IOException {
    // Name of the cached auxiliary object, e.g. "export_ddi.cached".
    String cachedFileName = "export_" + formatName + ".cached";
    StorageIO<Dataset> dataAccess;
    try {
        dataAccess = DataAccess.getStorageIO(dataset);
    } catch (IOException ioex) {
        // Rewrap with context, preserving the original cause (was dropped).
        throw new IOException("IO Exception thrown exporting as " + cachedFileName, ioex);
    }
    try {
        // Open the cached export exactly once. The previous version called
        // getAuxFileAsInputStream() twice, leaking the first stream; a single
        // call is equivalent (returns the stream, or null if not cached).
        return dataAccess.getAuxFileAsInputStream(cachedFileName);
    } catch (IOException ioex) {
        throw new IOException("IO Exception thrown exporting as " + cachedFileName, ioex);
    }
}
Also used : Dataset(edu.harvard.iq.dataverse.Dataset) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) IOException(java.io.IOException)

Example 74 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

the class ExportService method cacheExport.

// This method runs the selected metadata exporter, caching the output
// in a file in the dataset directory / container based on its DOI:
private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException {
    try {
        // With some storage drivers, we can open a WritableChannel, or OutputStream
        // to directly write the generated metadata export that we want to cache;
        // Some drivers (like Swift) do not support that, and will give us an
        // "operation not supported" exception. If that's the case, we'll have
        // to save the output into a temp file, and then copy it over to the
        // permanent storage using the IO "save" command:
        boolean tempFileRequired = false;
        File tempFile = null;
        OutputStream outputStream = null;
        Dataset dataset = version.getDataset();
        StorageIO<Dataset> storageIO = null;
        try {
            storageIO = DataAccess.createNewStorageIO(dataset, "placeholder");
            Channel outputChannel = storageIO.openAuxChannel("export_" + format + ".cached", DataAccessOption.WRITE_ACCESS);
            outputStream = Channels.newOutputStream((WritableByteChannel) outputChannel);
        } catch (IOException ioex) {
            // Driver can't open a writable aux channel; fall back to a temp
            // file that we copy into the aux location afterwards.
            // NOTE(review): if createNewStorageIO itself threw, storageIO is
            // still null and savePathAsAux below would NPE (pre-existing
            // behavior) — confirm whether that path is reachable.
            tempFileRequired = true;
            tempFile = File.createTempFile("tempFileToExport", ".tmp");
            outputStream = new FileOutputStream(tempFile);
        }
        try {
            // Write the export through the stream we actually opened above.
            // (Previously, the direct-channel branch opened a second
            // FileOutputStream on a local filesystem path, leaking the aux
            // channel and bypassing the storage driver entirely.)
            exporter.exportDataset(version, datasetAsJson, outputStream);
            outputStream.flush();
            outputStream.close();
            if (tempFileRequired) {
                // this method copies a local filesystem Path into this DataAccess Auxiliary location:
                logger.fine("Saving path as aux for temp file in: " + Paths.get(tempFile.getAbsolutePath()));
                storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached");
                boolean tempFileDeleted = tempFile.delete();
                logger.fine("tempFileDeleted: " + tempFileDeleted);
            }
        } catch (IOException ioex) {
            throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
        }
    } catch (IOException ioex) {
        throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
    }
}
Also used : Path(java.nio.file.Path) Dataset(edu.harvard.iq.dataverse.Dataset) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) Channel(java.nio.channels.Channel) WritableByteChannel(java.nio.channels.WritableByteChannel) FileOutputStream(java.io.FileOutputStream) WritableByteChannel(java.nio.channels.WritableByteChannel) IOException(java.io.IOException) File(java.io.File)

Example 75 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

the class ExportService method clearCachedExport.

/**
 * Deletes the cached metadata export for the given format from the
 * dataset's auxiliary storage.
 *
 * @param dataset the dataset whose cached export should be removed
 * @param format  the export format name (used in the "export_FORMAT.cached"
 *                auxiliary object name)
 * @throws IOException if the underlying storage delete fails
 */
private void clearCachedExport(Dataset dataset, String format) throws IOException {
    try {
        StorageIO<Dataset> storageIO = getStorageIO(dataset);
        storageIO.deleteAuxObject("export_" + format + ".cached");
    } catch (IOException ex) {
        // Fixed: the old message said "exporting" (copy-paste from the export
        // path) and the underlying cause was dropped.
        throw new IOException("IO Exception thrown clearing cached export " + "export_" + format + ".cached", ex);
    }
}
Also used : Dataset(edu.harvard.iq.dataverse.Dataset) IOException(java.io.IOException)

Aggregations

Dataset (edu.harvard.iq.dataverse.Dataset)142 Test (org.junit.Test)53 DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion)44 DataFile (edu.harvard.iq.dataverse.DataFile)41 Dataverse (edu.harvard.iq.dataverse.Dataverse)34 AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser)27 CommandException (edu.harvard.iq.dataverse.engine.command.exception.CommandException)23 IOException (java.io.IOException)22 ArrayList (java.util.ArrayList)22 Path (javax.ws.rs.Path)22 FileMetadata (edu.harvard.iq.dataverse.FileMetadata)17 DataverseRole (edu.harvard.iq.dataverse.authorization.DataverseRole)17 RoleAssignment (edu.harvard.iq.dataverse.RoleAssignment)16 DataverseRequest (edu.harvard.iq.dataverse.engine.command.DataverseRequest)16 Date (java.util.Date)16 JsonObjectBuilder (javax.json.JsonObjectBuilder)16 JsonObject (javax.json.JsonObject)15 DvObject (edu.harvard.iq.dataverse.DvObject)14 RoleAssignee (edu.harvard.iq.dataverse.authorization.RoleAssignee)14 PrivateUrlUser (edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser)14