Use of javax.validation.ValidatorFactory in project nifi-registry by apache.
The class TestRegistryService, method setup().
@Before
public void setup() {
    metadataService = mock(MetadataService.class);
    flowPersistenceProvider = mock(FlowPersistenceProvider.class);
    snapshotSerializer = mock(VersionedProcessGroupSerializer.class);

    final ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();
    validator = validatorFactory.getValidator();

    registryService = new RegistryService(metadataService, flowPersistenceProvider, snapshotSerializer, validator);
}
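The bootstrap in that setup method is the standard Bean Validation entry point and works the same way outside any mocking framework. Below is a minimal, self-contained sketch of it, assuming a Bean Validation 2.0 provider such as Hibernate Validator on the classpath; the Bucket bean and its constraint are hypothetical illustrations, not NiFi Registry classes.

import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import javax.validation.constraints.NotBlank;

public class ValidatorBootstrapExample {

    // Hypothetical bean with a single constraint, for illustration only.
    static class Bucket {
        @NotBlank(message = "Bucket name cannot be blank")
        String name;

        Bucket(String name) {
            this.name = name;
        }
    }

    public static void main(String[] args) {
        // Build the default factory; both it and the Validator are thread-safe
        // per the Bean Validation spec, so they can be shared across tests.
        ValidatorFactory validatorFactory = Validation.buildDefaultValidatorFactory();
        Validator validator = validatorFactory.getValidator();

        // A blank name violates @NotBlank, so this prints one violation.
        Set<ConstraintViolation<Bucket>> violations = validator.validate(new Bucket(" "));
        violations.forEach(v -> System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
    }
}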
Use of javax.validation.ValidatorFactory in project dataverse by IQSS.
The class DatasetVersion, method validateRequired().
public List<ConstraintViolation<DatasetField>> validateRequired() {
    List<ConstraintViolation<DatasetField>> returnList = new ArrayList<>();
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    for (DatasetField dsf : this.getFlatDatasetFields()) {
        // clear out any existing validation message
        dsf.setValidationMessage(null);
        Set<ConstraintViolation<DatasetField>> constraintViolations = validator.validate(dsf);
        for (ConstraintViolation<DatasetField> constraintViolation : constraintViolations) {
            dsf.setValidationMessage(constraintViolation.getMessage());
            returnList.add(constraintViolation);
            // currently only support one message, so we can break out of the loop after the first constraint violation
            break;
        }
    }
    return returnList;
}
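The loop body keeps only the first violation per field. That "first violation per bean" pattern can be factored out on its own; the following is a hypothetical generic helper sketching it, not a Dataverse class.

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;

public final class FirstViolationCollector {

    // Hypothetical helper mirroring the pattern above: validate each bean in a
    // collection and keep only the first violation per bean, as the loop does.
    static <T> List<ConstraintViolation<T>> firstViolations(Validator validator, List<T> beans) {
        List<ConstraintViolation<T>> result = new ArrayList<>();
        for (T bean : beans) {
            Set<ConstraintViolation<T>> violations = validator.validate(bean);
            if (!violations.isEmpty()) {
                result.add(violations.iterator().next());
            }
        }
        return result;
    }
}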
Use of javax.validation.ValidatorFactory in project dataverse by IQSS.
The class DatasetVersion, method validate().
public Set<ConstraintViolation> validate() {
    Set<ConstraintViolation> returnSet = new HashSet<>();
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    for (DatasetField dsf : this.getFlatDatasetFields()) {
        // clear out any existing validation message
        dsf.setValidationMessage(null);
        Set<ConstraintViolation<DatasetField>> constraintViolations = validator.validate(dsf);
        for (ConstraintViolation<DatasetField> constraintViolation : constraintViolations) {
            dsf.setValidationMessage(constraintViolation.getMessage());
            returnSet.add(constraintViolation);
            // currently only support one message, so we can break out of the loop after the first constraint violation
            break;
        }
        for (DatasetFieldValue dsfv : dsf.getDatasetFieldValues()) {
            // clear out any existing validation message
            dsfv.setValidationMessage(null);
            Set<ConstraintViolation<DatasetFieldValue>> constraintViolations2 = validator.validate(dsfv);
            for (ConstraintViolation<DatasetFieldValue> constraintViolation : constraintViolations2) {
                dsfv.setValidationMessage(constraintViolation.getMessage());
                returnSet.add(constraintViolation);
                // currently only support one message, so we can break out of the loop after the first constraint violation
                break;
            }
        }
    }
    List<FileMetadata> dsvfileMetadatas = this.getFileMetadatas();
    if (dsvfileMetadatas != null) {
        for (FileMetadata fileMetadata : dsvfileMetadatas) {
            Set<ConstraintViolation<FileMetadata>> constraintViolations = validator.validate(fileMetadata);
            if (constraintViolations.size() > 0) {
                // currently only support one message
                ConstraintViolation<FileMetadata> violation = constraintViolations.iterator().next();
                /**
                 * @todo How can we expose this more detailed message
                 * containing the invalid value to the user?
                 */
                String message = "Constraint violation found in FileMetadata. " + violation.getMessage() + " " + "The invalid value is \"" + violation.getInvalidValue().toString() + "\".";
                logger.info(message);
                returnSet.add(violation);
                // currently only support one message, so we can break out of the loop after the first constraint violation
                break;
            }
        }
    }
    return returnSet;
}
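Note that both Dataverse methods above rebuild the ValidatorFactory on every call. Since the spec requires the default factory and its Validator to be thread-safe, building them once and sharing them is a common alternative. A minimal sketch of that approach follows; ValidationHolder is a hypothetical name, not how Dataverse itself is structured.

import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;

// Process-wide holder so the (relatively expensive) factory is built once.
public final class ValidationHolder {

    private static final ValidatorFactory FACTORY = Validation.buildDefaultValidatorFactory();
    public static final Validator VALIDATOR = FACTORY.getValidator();

    private ValidationHolder() {
    }
}

Callers would then use ValidationHolder.VALIDATOR.validate(bean) instead of bootstrapping a new factory per method call.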
Use of javax.validation.ValidatorFactory in project dataverse by IQSS.
The class BuiltinUserServiceBean, method save().
public BuiltinUser save(BuiltinUser aUser) {
    /**
     * Trim the email address no matter what the user entered or is entered
     * on their behalf in the case of Shibboleth assertions.
     *
     * @todo Why doesn't Bean Validation report that leading and trailing
     * whitespace in an email address is a problem?
     */
    aUser.setEmail(aUser.getEmail().trim());
    /**
     * We throw a proper IllegalArgumentException here because otherwise
     * from the API you get a 500 response and "Can't save user: null".
     */
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    Set<ConstraintViolation<BuiltinUser>> violations = validator.validate(aUser);
    if (violations.size() > 0) {
        StringBuilder sb = new StringBuilder();
        violations.stream().forEach((violation) -> {
            sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
        });
        throw new IllegalArgumentException("BuiltinUser could not be saved due to constraint violations: " + sb);
    }
    if (aUser.getId() == null) {
        // see that the username is unique
        if (em.createNamedQuery("BuiltinUser.findByUserName").setParameter("userName", aUser.getUserName()).getResultList().size() > 0) {
            throw new IllegalArgumentException("BuiltinUser with username '" + aUser.getUserName() + "' already exists.");
        }
        em.persist(aUser);
        return aUser;
    } else {
        return em.merge(aUser);
    }
}
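Building the exception message by hand, as above, is one option. javax.validation also ships ConstraintViolationException, which carries the violation set itself so callers can inspect it programmatically. A sketch of that alternative follows; the class and method names here are hypothetical.

import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Validator;

public final class ValidateBeforePersist {

    // Throws if the bean is invalid; callers can read the violations
    // back off the exception via getConstraintViolations().
    static <T> void requireValid(Validator validator, T bean) {
        Set<ConstraintViolation<T>> violations = validator.validate(bean);
        if (!violations.isEmpty()) {
            throw new ConstraintViolationException(violations);
        }
    }
}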
Use of javax.validation.ValidatorFactory in project dataverse by IQSS.
The class ImportServiceBean, method doImport().
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException " + e);
    }
    // convert DTO to Json,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // If a NEW dataset carries a global id with a protocol other than the one
        // we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error executing Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error executing dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
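The scrub logic in the middle of doImport is a reusable fix-then-revalidate pattern: attempt a repair on an invalid value, then run the same Validator again to confirm the repair actually cleared the violation before treating the value as fixed. Stripped to its essentials, a generic sketch looks like the following; everything here is hypothetical apart from the Validator API itself.

import java.util.Set;
import java.util.function.Consumer;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;

public final class ScrubAndRevalidate {

    // Applies a fix to an invalid bean, then revalidates. Returns true only
    // if the bean passes validation after the fix, mirroring how doImport
    // only keeps 'fixed = true' when the scrubbed value revalidates cleanly.
    static <T> boolean tryFix(Validator validator, T bean, Consumer<T> fix) {
        fix.accept(bean);
        Set<ConstraintViolation<T>> remaining = validator.validate(bean);
        return remaining.isEmpty();
    }
}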