Search in sources :

Example 86 with ValidatorFactory

use of javax.validation.ValidatorFactory in project minijax by minijax.

the class CarTest method setUpValidator.

@BeforeClass
public static void setUpValidator() {
    // Build a Validator backed by the Minijax provider once, and cache it
    // in the shared static field for all tests in this class.
    final ValidatorFactory validatorFactory =
            Validation.byProvider(MinijaxValidationProvider.class)
                    .configure()
                    .buildValidatorFactory();
    validator = validatorFactory.getValidator();
}
Also used : ValidatorFactory(javax.validation.ValidatorFactory) BeforeClass(org.junit.BeforeClass)

Example 87 with ValidatorFactory

use of javax.validation.ValidatorFactory in project dataverse by IQSS.

the class ImportServiceBean method doImportHarvestedDataset.

/**
 * Imports a single harvested dataset record into the dataverse owned by the
 * given harvesting client. Supported metadata formats: DDI (and oai_ddi*),
 * Dublin Core (dc / oai_dc), and native dataverse_json. On a parse/command
 * failure the produced JSON is saved next to the metadata file for debugging
 * and an ImportException (with cause) is thrown.
 *
 * @param dataverseRequest the request on whose behalf the import runs
 * @param harvestingClient the client configuration; must be non-null with a non-null dataverse
 * @param harvestIdentifier the OAI record identifier of the harvested record
 * @param metadataFormat one of: ddi/oai_ddi*, dc/oai_dc, dataverse_json
 * @param metadataFile the file containing the harvested metadata
 * @param cleanupLog log for data-scrubbing messages
 * @return the imported (created) Dataset
 * @throws ImportException on any validation/parse/command failure
 * @throws IOException on I/O failure reading or saving metadata
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog) throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        // fixed typo: "wiht" -> "with"
        throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;
    DatasetDTO dsDTO = null;
    String json = null;
    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat) || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO:
            // import type should be configurable - it should be possible to
            // select whether you want to harvest with or without files,
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            // preserve the original exception as the cause
            throw new ImportException("Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")", e);
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            // preserve the original exception as the cause
            throw new ImportException("Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")", e);
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON.
        // Simply read it into a string, and pass to the final import further down:
        logger.fine("Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }
    if (json == null) {
        if (dsDTO != null) {
            // convert DTO to Json,
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the metadata harvested: " + json);
        } else {
            throw new ImportException("Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }
    // JsonReader is AutoCloseable; close it deterministically.
    JsonObject obj;
    try (JsonReader jsonReader = Json.createReader(new StringReader(json))) {
        obj = jsonReader.readObject();
    }
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        // if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        // throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        // }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        if (!invalidViolations.isEmpty()) {
            // Build the Bean Validation factory only when there is something to re-check;
            // it is used to verify that scrubbed values now pass validation.
            ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
            try {
                Validator validator = factory.getValidator();
                for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                    DatasetFieldValue f = v.getRootBean();
                    boolean fixed = false;
                    boolean converted = false;
                    // TODO: Is this scrubbing something we want to continue doing?
                    if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                        fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                        converted = true;
                        if (fixed) {
                            // Re-validate the scrubbed value; only accept the fix if it is now clean.
                            Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                            if (!scrubbedViolations.isEmpty()) {
                                fixed = false;
                            }
                        }
                    }
                    if (!fixed) {
                        // Fall back to the NA sentinel and record the substitution in the cleanup log.
                        String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    }
                }
            } finally {
                // Release validation provider resources (ValidatorFactory#close, BV 1.1+).
                factory.close();
            }
        }
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global unique identifier that we could recognize, skipping.");
        }
        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId() + " already exists, in the dataverse " + existingDs.getOwner().getAlias() + ", skipping.");
            }
            // skip it also:
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId() + " already exists in this dataverse; skipping.");
            }
            // We will replace the current version with the imported version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this client from the
            // index server:
            indexService.deleteHarvestedDocuments(existingDs);
            // DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
            }
            // TODO:
            // Verify what happens with the indexed files in SOLR?
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        } else {
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }
    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        // Save the problematic JSON next to the metadata file to aid debugging.
        // try-with-resources guarantees the stream is closed even if write() throws;
        // a single write() replaces the old manual 8K-chunk loop.
        // NOTE(review): getBytes() uses the platform charset, matching how the JSON
        // string was read; consider UTF-8 explicitly in a follow-up.
        try (FileOutputStream savedJsonFileStream = new FileOutputStream(new File(metadataFile.getAbsolutePath() + ".json"))) {
            savedJsonFileStream.write(json.getBytes());
        }
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException("Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DataFile(edu.harvard.iq.dataverse.DataFile) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) IOException(java.io.IOException) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) Dataverse(edu.harvard.iq.dataverse.Dataverse) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) FileOutputStream(java.io.FileOutputStream) DataFile(edu.harvard.iq.dataverse.DataFile) File(java.io.File) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand) TransactionAttribute(javax.ejb.TransactionAttribute)

Example 88 with ValidatorFactory

use of javax.validation.ValidatorFactory in project dataverse by IQSS.

the class AuthenticationServiceBean method convertRemoteToBuiltIn.

/**
 * @param idOfAuthUserToConvert The id of the remote AuthenticatedUser
 * (Shibboleth user or OAuth user) to convert to a BuiltinUser.
 * @param newEmailAddress The new email address that will be used instead of
 * the user's old email address from the institution that they have left.
 * @return BuiltinUser the newly-created builtin user row
 * @throws java.lang.Exception You must catch and report back to the user (a
 * superuser) any Exceptions.
 */
public BuiltinUser convertRemoteToBuiltIn(Long idOfAuthUserToConvert, String newEmailAddress) throws Exception {
    AuthenticatedUser authenticatedUser = findByID(idOfAuthUserToConvert);
    if (authenticatedUser == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " not found.");
    }
    // The new email must not collide with any existing account.
    AuthenticatedUser existingUserWithSameEmail = getAuthenticatedUserByEmail(newEmailAddress);
    if (existingUserWithSameEmail != null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " (" + authenticatedUser.getIdentifier() + ") cannot be converted from remote to BuiltIn because the email address " + newEmailAddress + " is already in use by user id " + existingUserWithSameEmail.getId() + " (" + existingUserWithSameEmail.getIdentifier() + ").");
    }
    // Copy the identifying fields from the remote user onto a new builtin user.
    BuiltinUser builtinUser = new BuiltinUser();
    builtinUser.setUserName(authenticatedUser.getUserIdentifier());
    builtinUser.setFirstName(authenticatedUser.getFirstName());
    builtinUser.setLastName(authenticatedUser.getLastName());
    // Bean Validation will check for null and invalid email addresses
    builtinUser.setEmail(newEmailAddress);
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Set<ConstraintViolation<BuiltinUser>> violations;
    try {
        violations = factory.getValidator().validate(builtinUser);
    } finally {
        // Release validation provider resources (ValidatorFactory#close, BV 1.1+).
        factory.close();
    }
    int numViolations = violations.size();
    if (numViolations > 0) {
        StringBuilder logMsg = new StringBuilder();
        for (ConstraintViolation<?> violation : violations) {
            logMsg.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
        }
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because of constraint violations on the BuiltIn user that would be created: " + numViolations + ". Details: " + logMsg);
    }
    try {
        builtinUser = builtinUserServiceBean.save(builtinUser);
    } catch (IllegalArgumentException ex) {
        // Keep the original message shape, but preserve the cause for diagnostics.
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because of an IllegalArgumentException creating the row in the builtinuser table: " + ex, ex);
    }
    AuthenticatedUserLookup lookup = authenticatedUser.getAuthenticatedUserLookup();
    if (lookup == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " does not have an 'authenticateduserlookup' row");
    }
    String providerId = lookup.getAuthenticationProviderId();
    if (providerId == null) {
        throw new Exception("User id " + idOfAuthUserToConvert + " provider id is null.");
    }
    String builtinProviderId = BuiltinAuthenticationProvider.PROVIDER_ID;
    if (providerId.equals(builtinProviderId)) {
        throw new Exception("User id " + idOfAuthUserToConvert + " cannot be converted from remote to BuiltIn because current provider id is '" + providerId + "' which is the same as '" + builtinProviderId + "'. This user is already a BuiltIn user.");
    }
    // Re-point the lookup row at the builtin provider and sync the email.
    lookup.setAuthenticationProviderId(BuiltinAuthenticationProvider.PROVIDER_ID);
    lookup.setPersistentUserId(authenticatedUser.getUserIdentifier());
    em.persist(lookup);
    authenticatedUser.setEmail(newEmailAddress);
    em.persist(authenticatedUser);
    em.flush();
    return builtinUser;
}
Also used : BuiltinUser(edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser) ValidatorFactory(javax.validation.ValidatorFactory) AuthenticatedUser(edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser) NoResultException(javax.persistence.NoResultException) NonUniqueResultException(javax.persistence.NonUniqueResultException) AuthenticationProviderFactoryNotFoundException(edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException) EJBException(javax.ejb.EJBException) AuthorizationSetupException(edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException) AuthenticationFailedException(edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException) ConstraintViolation(javax.validation.ConstraintViolation) Validator(javax.validation.Validator)

Example 89 with ValidatorFactory

use of javax.validation.ValidatorFactory in project ApiEE by avraampiperidis.

the class Validator method validate.

/**
 * Validates the given entity: first against its Bean Validation constraints,
 * then against its own {@code isValid()} check.
 *
 * @param entity the entity to validate
 * @throws ConstraintViolationException if any constraint is violated or
 *         {@code entity.isValid()} returns false
 */
static <T extends BaseEntity> void validate(T entity) {
    // NOTE(review): building a ValidatorFactory per call is expensive; consider
    // caching a shared factory if this is on a hot path.
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    try {
        // getValidator() already returns javax.validation.Validator; the old cast was redundant.
        javax.validation.Validator validator = factory.getValidator();
        Set<ConstraintViolation<T>> cv = validator.validate(entity);
        if (!cv.isEmpty()) {
            throw new ConstraintViolationException(cv);
        }
    } finally {
        // Release validation provider resources (ValidatorFactory#close, BV 1.1+).
        factory.close();
    }
    if (!entity.isValid()) {
        throw new ConstraintViolationException("Entity validation failed", null);
    }
}
Also used : ValidatorFactory(javax.validation.ValidatorFactory) ConstraintViolation(javax.validation.ConstraintViolation) ConstraintViolationException(javax.validation.ConstraintViolationException)

Example 90 with ValidatorFactory

use of javax.validation.ValidatorFactory in project crnk-framework by crnk-project.

the class ValidationEndToEndTest method testMapElementAttributeNotNull.

@Test
public void testMapElementAttributeNotNull() {
    // A map value whose "value" attribute is null must trigger a @NotNull violation
    // that points at the offending map entry.
    ProjectData data = new ProjectData();
    // violation
    data.setValue(null);
    Project project = new Project();
    project.setId(1L);
    project.setName("test");
    project.setDataMap(new LinkedHashMap());
    project.getDataMap().put("someKey", data);
    // (removed an unused ValidatorFactory/Validator pair that was never referenced)
    try {
        projectRepo.create(project);
        // The test previously passed vacuously if create() did not throw.
        Assert.fail("expected ConstraintViolationException for null map element attribute");
    } catch (ConstraintViolationException e) {
        Set<ConstraintViolation<?>> violations = e.getConstraintViolations();
        Assert.assertEquals(1, violations.size());
        ConstraintViolationImpl violation = (ConstraintViolationImpl) violations.iterator().next();
        Assert.assertEquals("{javax.validation.constraints.NotNull.message}", violation.getMessageTemplate());
        Assert.assertEquals("dataMap[someKey].value", violation.getPropertyPath().toString());
        Assert.assertNotNull(violation.getMessage());
        Assert.assertEquals("/data/attributes/dataMap/someKey/value", violation.getErrorData().getSourcePointer());
    }
}
Also used : Project(io.crnk.validation.mock.models.Project) Set(java.util.Set) HashSet(java.util.HashSet) ValidatorFactory(javax.validation.ValidatorFactory) ConstraintViolationImpl(io.crnk.validation.internal.ConstraintViolationImpl) ConstraintViolationException(javax.validation.ConstraintViolationException) ProjectData(io.crnk.validation.mock.models.ProjectData) Validator(javax.validation.Validator) ComplexValidator(io.crnk.validation.mock.ComplexValidator) LinkedHashMap(java.util.LinkedHashMap) Test(org.junit.Test)

Aggregations

ValidatorFactory (javax.validation.ValidatorFactory)106 Validator (javax.validation.Validator)31 Before (org.junit.Before)28 ConstraintViolation (javax.validation.ConstraintViolation)25 Test (org.junit.Test)17 BeforeClass (org.junit.BeforeClass)13 ConstraintViolationException (javax.validation.ConstraintViolationException)7 HibernateValidatorConfiguration (org.hibernate.validator.HibernateValidatorConfiguration)6 ResourceBundleMessageInterpolator (org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator)6 PlatformResourceBundleLocator (org.hibernate.validator.resourceloading.PlatformResourceBundleLocator)6 ArrayList (java.util.ArrayList)5 HashMap (java.util.HashMap)5 BeanManager (javax.enterprise.inject.spi.BeanManager)5 InitialContext (javax.naming.InitialContext)5 ResourceBundleMessageSource (org.springframework.context.support.ResourceBundleMessageSource)5 HashSet (java.util.HashSet)4 NamingException (javax.naming.NamingException)4 ConstraintValidatorFactory (javax.validation.ConstraintValidatorFactory)4 ValidatorContext (javax.validation.ValidatorContext)4 HibernateValidator (org.hibernate.validator.HibernateValidator)4