Search in sources :

Example 31 with TransactionAttribute

use of javax.ejb.TransactionAttribute in project dataverse by IQSS.

From class ImportServiceBean, method createDataverse:

/**
 * This is just a convenience method, for testing migration.  It creates
 * a dummy dataverse with the directory name as dataverse name & alias,
 * owned by the "root" dataverse and populated with placeholder metadata.
 * Runs in its own transaction (REQUIRES_NEW) so a failure here does not
 * roll back the caller's work.
 * @param dvName used as both the alias and the display name of the new dataverse
 * @param dataverseRequest the request (user + context) on whose behalf the command runs
 * @return the persisted dataverse returned by the command engine
 * @throws ImportException if the create command fails; for bean-validation
 *         failures the message lists every invalid value
 */
@TransactionAttribute(REQUIRES_NEW)
public Dataverse createDataverse(String dvName, DataverseRequest dataverseRequest) throws ImportException {
    // Build a minimal dataverse under root with placeholder metadata.
    Dataverse d = new Dataverse();
    Dataverse root = dataverseService.findByAlias("root");
    d.setOwner(root);
    d.setAlias(dvName);
    d.setName(dvName);
    d.setAffiliation("affiliation");
    d.setPermissionRoot(false);
    d.setDescription("description");
    d.setDataverseType(Dataverse.DataverseType.RESEARCHERS);
    DataverseContact dc = new DataverseContact();
    dc.setContactEmail("pete@mailinator.com");
    ArrayList<DataverseContact> dcList = new ArrayList<>();
    dcList.add(dc);
    d.setDataverseContacts(dcList);
    try {
        d = engineSvc.submit(new CreateDataverseCommand(d, dataverseRequest, null, null));
    } catch (EJBException ex) {
        // Walk the cause chain looking for bean-validation failures so the
        // resulting message names each invalid value and its property path.
        Throwable cause = ex;
        StringBuilder sb = new StringBuilder();
        sb.append("Error creating dataverse.");
        while (cause.getCause() != null) {
            cause = cause.getCause();
            if (cause instanceof ConstraintViolationException) {
                ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
                for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
                    sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
                }
            }
        }
        logger.log(Level.SEVERE, sb.toString());
        // Preserve the original exception as the cause instead of discarding it.
        throw new ImportException(sb.toString(), ex);
    } catch (CommandException e) {
        // Keep the command failure as the cause so the full stack trace survives.
        throw new ImportException(e.getMessage(), e);
    }
    return d;
}
Also used : ConstraintViolation(javax.validation.ConstraintViolation) ArrayList(java.util.ArrayList) DataverseContact(edu.harvard.iq.dataverse.DataverseContact) ConstraintViolationException(javax.validation.ConstraintViolationException) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) CreateDataverseCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand) EJBException(javax.ejb.EJBException) Dataverse(edu.harvard.iq.dataverse.Dataverse) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 32 with TransactionAttribute

use of javax.ejb.TransactionAttribute in project dataverse by IQSS.

From class ImportServiceBean, method doImportHarvestedDataset:

/**
 * Imports one harvested metadata record into the harvesting client's dataverse,
 * in its own transaction (REQUIRES_NEW) so one bad record does not roll back a
 * whole harvest run.  Supported formats: DDI variants, Dublin Core, and native
 * dataverse JSON.  Non-JSON formats are first converted to a DatasetDTO and
 * then serialized to JSON before the final parse/import.
 *
 * @param dataverseRequest the request on whose behalf commands are executed
 * @param harvestingClient the client that produced the record; its dataverse becomes the owner
 * @param harvestIdentifier the OAI identifier of the record, stored on the dataset
 * @param metadataFormat one of the supported format names (case handling varies per branch)
 * @param metadataFile file containing the harvested metadata
 * @param cleanupLog log for values that had to be scrubbed/replaced with NA
 * @return the imported (created) dataset
 * @throws ImportException on any parse, validation, or command failure; the
 *         offending JSON is saved next to the metadata file for inspection
 * @throws IOException if the metadata file cannot be read
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog) throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;
    DatasetDTO dsDTO = null;
    String json = null;
    // Dispatch on the metadata format: DDI and DC go through a DTO; native
    // dataverse JSON is passed straight to the parser further down.
    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat) || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO:
            // import type should be configurable - it should be possible to
            // select whether you want to harvest with or without files,
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            throw new ImportException("Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            throw new ImportException("Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON.
        // Simply read it into a string, and pass to the final import further down:
        logger.fine("Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }
    // For the XML-based formats, serialize the DTO we built into JSON.
    if (json == null) {
        if (dsDTO != null) {
            // convert DTO to Json,
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the metadata harvested: " + json);
        } else {
            throw new ImportException("Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        // if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        // throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        // }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required contraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                // TODO: Is this scrubbing something we want to continue doing?
                if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                    converted = true;
                    if (fixed) {
                        // Re-validate the scrubbed value; only accept it if it is now clean.
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    // Last resort: replace the invalid value with NA and record it.
                    String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                    cleanupLog.println(msg);
                    f.setValue(DatasetField.NA_VALUE);
                }
            }
        }
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global unique identifier that we could recognize, skipping.");
        }
        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId() + " already exists, in the dataverse " + existingDs.getOwner().getAlias() + ", skipping.");
            }
            // skip it also:
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId() + " already exists in this dataverse; skipping.");
            }
            // We will replace the current version with the imported version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this client from the
            // index server:
            indexService.deleteHarvestedDocuments(existingDs);
            // DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
                harvestedFile = null;
            }
            // TODO:
            // Verify what happens with the indexed files in SOLR?
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        } else {
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }
    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        // Save the problem JSON next to the metadata file for post-mortem
        // inspection.  try-with-resources guarantees the stream is closed
        // even if a write fails (the original code leaked it on exception).
        try (FileOutputStream savedJsonFileStream = new FileOutputStream(new File(metadataFile.getAbsolutePath() + ".json"))) {
            byte[] jsonBytes = json.getBytes();
            int i = 0;
            while (i < jsonBytes.length) {
                int chunkSize = i + 8192 <= jsonBytes.length ? 8192 : jsonBytes.length - i;
                savedJsonFileStream.write(jsonBytes, i, chunkSize);
                i += chunkSize;
                savedJsonFileStream.flush();
            }
        }
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException("Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DataFile(edu.harvard.iq.dataverse.DataFile) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) IOException(java.io.IOException) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) Dataverse(edu.harvard.iq.dataverse.Dataverse) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) FileOutputStream(java.io.FileOutputStream) DataFile(edu.harvard.iq.dataverse.DataFile) File(java.io.File) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 33 with TransactionAttribute

use of javax.ejb.TransactionAttribute in project dataverse by IQSS.

From class IndexServiceBean, method indexDatasetInNewTransaction:

/**
 * Looks up the dataset by id and indexes it inside a fresh transaction
 * (REQUIRES_NEW), skipping the normal Solr document clean-up pass.
 *
 * @param datasetId primary key of the dataset to index
 * @return the future returned by {@code indexDataset}
 */
@TransactionAttribute(REQUIRES_NEW)
public Future<String> indexDatasetInNewTransaction(Long datasetId) {
    // Resolve the entity here, by id, so the lookup happens inside this
    // new transaction rather than in the caller's persistence context.
    final Dataset toIndex = em.find(Dataset.class, datasetId);
    // false: do not run the normal Solr doc clean-up for this pass.
    return indexDataset(toIndex, false);
}
Also used : Dataset(edu.harvard.iq.dataverse.Dataset) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 34 with TransactionAttribute

use of javax.ejb.TransactionAttribute in project dataverse by IQSS.

From class WorkflowServiceBean, method lockDataset:

/**
 * Places a Workflow lock on the context's dataset, attributed to the
 * requesting authenticated user, and flushes it to the database in a
 * dedicated transaction (REQUIRES_NEW) so the lock is committed
 * independently of the surrounding workflow processing.
 *
 * @param ctxt the workflow context supplying the dataset and the request
 * @throws CommandException declared for callers; not thrown directly here
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
void lockDataset(WorkflowContext ctxt) throws CommandException {
    final DatasetLock lock = new DatasetLock(DatasetLock.Reason.Workflow, ctxt.getRequest().getAuthenticatedUser());
    lock.setDataset(ctxt.getDataset());
    // Persist and flush immediately so the lock becomes visible to
    // other transactions right away.
    em.persist(lock);
    em.flush();
}
Also used : DatasetLock(edu.harvard.iq.dataverse.DatasetLock) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 35 with TransactionAttribute

use of javax.ejb.TransactionAttribute in project scrapcode by ringerc.

From class DummyEJB, method failsIfNotTransactional:

/**
 * Probe that a JTA transaction is really active: SAVEPOINT and its
 * rollback are only legal inside a transaction, so executing this pair
 * of native statements fails when no transaction is in progress.
 */
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void failsIfNotTransactional() {
    // Same two statements, same order, as individual native queries.
    for (final String sql : new String[] { "SAVEPOINT txtest", "ROLLBACK TO SAVEPOINT txtest" }) {
        em.createNativeQuery(sql).executeUpdate();
    }
}
Also used : Query(javax.persistence.Query) TransactionAttribute(javax.ejb.TransactionAttribute)

Aggregations

TransactionAttribute (javax.ejb.TransactionAttribute)61 JMSException (javax.jms.JMSException)8 IOException (java.io.IOException)7 Connection (javax.jms.Connection)6 Session (javax.jms.Session)6 TextMessage (javax.jms.TextMessage)6 Query (javax.persistence.Query)5 Configuration (org.hibernate.cfg.Configuration)5 Dataset (edu.harvard.iq.dataverse.Dataset)4 ConfigMessageException (eu.europa.ec.fisheries.uvms.config.exception.ConfigMessageException)4 ArrayList (java.util.ArrayList)4 Date (java.util.Date)4 EJBException (javax.ejb.EJBException)4 Message (javax.jms.Message)4 MessageProducer (javax.jms.MessageProducer)4 DataFile (edu.harvard.iq.dataverse.DataFile)3 AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser)3 ExchangeMessageException (eu.europa.ec.fisheries.uvms.exchange.message.exception.ExchangeMessageException)3 Properties (java.util.Properties)3 EntityManager (javax.persistence.EntityManager)3