Search in sources :

Example 1 with JsonParser

use of edu.harvard.iq.dataverse.util.json.JsonParser in project dataverse by IQSS.

In the class DDIExporterTest, the method testExportDataset.

@Test
public void testExportDataset() throws Exception {
    System.out.println("exportDataset");
    // FIXME: switch ddi/dataset-finch1.json
    // File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json");
    File datasetVersionJson = new File("src/test/resources/json/dataset-finch1.json");
    // Read the fixture explicitly as UTF-8: new String(byte[]) without a charset
    // uses the platform default encoding and silently corrupts non-ASCII metadata
    // on non-UTF-8 hosts.
    String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())), java.nio.charset.StandardCharsets.UTF_8);
    JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
    JsonObject json = jsonReader.readObject();
    // Parser is built without metadata-block or settings services (nulls) —
    // sufficient for parsing this fixture's fields.
    JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, null);
    DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DDIExporter instance = new DDIExporter();
    // NOTE(review): this flag is hard-coded false, so exportDataset() is never
    // invoked — the test currently exercises only JSON parsing and prints an
    // empty string, with no assertions. Left as-is because flipping it may
    // reintroduce the NPE the flag name refers to; re-enable and add an
    // assertion once that NPE is fixed. TODO confirm against the exporter.
    boolean nullPointerFixed = false;
    if (nullPointerFixed) {
        instance.exportDataset(version, json, byteArrayOutputStream);
    }
    System.out.println("out: " + XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString()));
}
Also used : StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) ByteArrayOutputStream(java.io.ByteArrayOutputStream) File(java.io.File) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) Test(org.junit.Test)

Example 2 with JsonParser

use of edu.harvard.iq.dataverse.util.json.JsonParser in project dataverse by IQSS.

In the class DDIExporterTest, the method testExportDatasetContactEmailAbsent.

@Test
public void testExportDatasetContactEmailAbsent() throws Exception {
    File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/datasetContactEmailAbsent.json");
    // Read the fixture explicitly as UTF-8: new String(byte[]) without a charset
    // uses the platform default encoding, which is not portable across CI hosts.
    String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())), java.nio.charset.StandardCharsets.UTF_8);
    JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
    JsonObject json = jsonReader.readObject();
    // JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsSvc);
    JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, null);
    DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DDIExporter instance = new DDIExporter();
    instance.exportDataset(version, json, byteArrayOutputStream);
    System.out.println(XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString()));
    // A dataset whose contact has no email must not leak any address into the DDI.
    assertFalse(byteArrayOutputStream.toString().contains("finch@mailinator.com"));
}
Also used : StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) ByteArrayOutputStream(java.io.ByteArrayOutputStream) File(java.io.File) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) Test(org.junit.Test)

Example 3 with JsonParser

use of edu.harvard.iq.dataverse.util.json.JsonParser in project dataverse by IQSS.

In the class DDIExporterTest, the method testExportDatasetContactEmailPresent.

@Test
public void testExportDatasetContactEmailPresent() throws Exception {
    File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/datasetContactEmailPresent.json");
    // Read the fixture explicitly as UTF-8: new String(byte[]) without a charset
    // uses the platform default encoding, which is not portable across CI hosts.
    String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())), java.nio.charset.StandardCharsets.UTF_8);
    JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
    JsonObject json = jsonReader.readObject();
    JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, null);
    DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    DDIExporter instance = new DDIExporter();
    instance.exportDataset(version, json, byteArrayOutputStream);
    System.out.println(XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString()));
    // A dataset whose contact provides an email must surface it in the DDI output.
    assertTrue(byteArrayOutputStream.toString().contains("finch@mailinator.com"));
}
Also used : StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) ByteArrayOutputStream(java.io.ByteArrayOutputStream) File(java.io.File) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) Test(org.junit.Test)

Example 4 with JsonParser

use of edu.harvard.iq.dataverse.util.json.JsonParser in project dataverse by IQSS.

In the class Groups, the method putIpGroups.

/**
 * Creates or updates the {@link IpGroup} whose persisted alias is
 * {@code groupName}, using the JSON body as the group definition.
 *
 * @param groupName Name of the group; must be non-empty and match the legal
 *                  group-name pattern.
 * @param dto JSON definition of the group.
 * @return Response describing the created group, or the error that prevented
 *         that group from being created.
 */
@PUT
@Path("ip/{groupName}")
public Response putIpGroups(@PathParam("groupName") String groupName, JsonObject dto) {
    try {
        // Guard clauses: reject missing/blank names, then names with illegal chars.
        boolean nameBlank = (groupName == null) || groupName.trim().isEmpty();
        if (nameBlank) {
            return badRequest("Group name cannot be empty");
        }
        boolean nameLegal = legalGroupName.matcher(groupName).matches();
        if (!nameLegal) {
            return badRequest("Group name can contain only letters, digits, and the chars '-' and '_'");
        }
        // Parse the body, bind it to this provider under the given alias, persist.
        IpGroup parsed = new JsonParser().parseIpGroup(dto);
        parsed.setGroupProvider(ipGroupPrv);
        parsed.setPersistedGroupAlias(groupName);
        IpGroup stored = ipGroupPrv.store(parsed);
        return created("/groups/ip/" + stored.getPersistedGroupAlias(), json(stored));
    } catch (Exception e) {
        // REST boundary: log with cause, translate to a 500 for the client.
        logger.log(Level.WARNING, "Error while storing a new IP group: " + e.getMessage(), e);
        return error(Response.Status.INTERNAL_SERVER_ERROR, "Error: " + e.getMessage());
    }
}
Also used : IpGroup(edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) Path(javax.ws.rs.Path) PUT(javax.ws.rs.PUT)

Example 5 with JsonParser

use of edu.harvard.iq.dataverse.util.json.JsonParser in project dataverse by IQSS.

In the class ImportServiceBean, the method doImport.

/**
 * Imports one dataset from DDI XML into {@code owner}: parses the XML to a
 * DatasetDTO, round-trips the DTO through JSON into a Dataset via JsonParser,
 * validates (and, for migration/harvest, scrubs) the metadata, then persists
 * it through the command engine.
 *
 * @param dataverseRequest request (user) on whose behalf engine commands run
 * @param owner the Dataverse that will own the imported dataset
 * @param xmlToParse raw DDI XML for a single dataset
 * @param fileName source file name; used only in cleanup-log messages
 * @param importType NEW, MIGRATION, or HARVEST — controls parser leniency,
 *        protocol checks, and whether invalid values are fixed or fatal
 * @param cleanupLog sink recording every value modified during scrubbing
 * @return a JSON builder carrying a one-line human-readable status message
 * @throws ImportException on XML/JSON parse failure, validation failure, or
 *         command-engine failure (original cause attached where available)
 * @throws IOException declared for callers; not thrown directly in this body
 */
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException" + e);
    }
    // convert DTO to Json,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        // Lenient parsing for MIGRATION/HARVEST; strict only for NEW imports.
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // If a NEW import carries a global id under a protocol other than the
        // one we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        // Re-validate the scrubbed value; the fix only counts if
                        // it actually clears the violation.
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        // Harvest is best-effort: replace the bad value with NA
                        // and record what was changed in the cleanup log.
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        // Non-harvest imports fail hard on unfixable values.
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                // Harvested datasets are expected to hold exactly one version;
                // anything else indicates local edits we must not destroy.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                // Destroy-then-recreate: order matters — the old dataset must be
                // gone before the replacement is created under the same id.
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            // No dataset with this global id yet: create it from scratch.
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    // NOTE(review): "excuting" typo below lives in runtime log/exception
    // strings, so it is deliberately left untouched here.
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) CreateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand)

Aggregations

JsonParser (edu.harvard.iq.dataverse.util.json.JsonParser)12 StringReader (java.io.StringReader)9 JsonObject (javax.json.JsonObject)9 JsonReader (javax.json.JsonReader)9 DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion)8 JsonParseException (edu.harvard.iq.dataverse.util.json.JsonParseException)6 Gson (com.google.gson.Gson)5 File (java.io.File)5 XMLStreamException (javax.xml.stream.XMLStreamException)5 ByteArrayOutputStream (java.io.ByteArrayOutputStream)4 Test (org.junit.Test)4 Dataset (edu.harvard.iq.dataverse.Dataset)3 GsonBuilder (com.google.gson.GsonBuilder)2 DatasetField (edu.harvard.iq.dataverse.DatasetField)2 DatasetFieldValue (edu.harvard.iq.dataverse.DatasetFieldValue)2 Dataverse (edu.harvard.iq.dataverse.Dataverse)2 ForeignMetadataFormatMapping (edu.harvard.iq.dataverse.ForeignMetadataFormatMapping)2 DatasetDTO (edu.harvard.iq.dataverse.api.dto.DatasetDTO)2 IpGroup (edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup)2 CommandException (edu.harvard.iq.dataverse.engine.command.exception.CommandException)2