
Example 1 with JsonParseException

Use of edu.harvard.iq.dataverse.util.json.JsonParseException in the dataverse project by IQSS.

From the class HarvestingClients, method createHarvestingClient.

@POST
@Path("{nickName}")
public Response createHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
    try (StringReader rdr = new StringReader(jsonBody)) {
        JsonObject json = Json.createReader(rdr).readObject();
        HarvestingClient harvestingClient = new HarvestingClient();
        // TODO: check that it doesn't exist yet...
        harvestingClient.setName(nickName);
        String dataverseAlias = jsonParser().parseHarvestingClient(json, harvestingClient);
        Dataverse ownerDataverse = dataverseService.findByAlias(dataverseAlias);
        if (ownerDataverse == null) {
            return error(Response.Status.BAD_REQUEST, "No such dataverse: " + dataverseAlias);
        }
        harvestingClient.setDataverse(ownerDataverse);
        if (ownerDataverse.getHarvestingClientConfigs() == null) {
            ownerDataverse.setHarvestingClientConfigs(new ArrayList<>());
        }
        ownerDataverse.getHarvestingClientConfigs().add(harvestingClient);
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        HarvestingClient managedHarvestingClient = execCommand(new CreateHarvestingClientCommand(req, harvestingClient));
        return created("/harvest/clients/" + nickName, harvestingConfigAsJson(managedHarvestingClient));
    } catch (JsonParseException ex) {
        return error(Response.Status.BAD_REQUEST, "Error parsing harvesting client: " + ex.getMessage());
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}
Also used: DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) CreateHarvestingClientCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand) StringReader(java.io.StringReader) JsonObject(javax.json.JsonObject) HarvestingClient(edu.harvard.iq.dataverse.harvest.client.HarvestingClient) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) Dataverse(edu.harvard.iq.dataverse.Dataverse) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST)
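
All five examples on this page share the same error-handling contract: javax.json turns the request body into a JsonObject, the Dataverse JsonParser raises JsonParseException on semantically invalid input, and the endpoint maps that exception to an HTTP 400 (while WrappedResponse carries auth and lookup failures through). Below is a minimal standalone sketch of that contract; it is not from the Dataverse codebase, and the "dataverseAlias" check is a hypothetical stand-in for the real parser.

import java.io.StringReader;

import javax.json.Json;
import javax.json.JsonObject;
import javax.ws.rs.core.Response;

import edu.harvard.iq.dataverse.util.json.JsonParseException;

public class ParseAndReport {

    // Hypothetical helper mirroring the endpoint above: parse the body with
    // javax.json, signal semantic problems via JsonParseException, and map
    // the exception to a 400 instead of letting it surface as a 500.
    static Response handle(String jsonBody) {
        try (StringReader rdr = new StringReader(jsonBody)) {
            JsonObject json = Json.createReader(rdr).readObject();
            if (!json.containsKey("dataverseAlias")) { // assumed field name, illustration only
                throw new JsonParseException("dataverseAlias unspecified");
            }
            return Response.ok().build();
        } catch (JsonParseException ex) {
            return Response.status(Response.Status.BAD_REQUEST)
                    .entity("Error parsing harvesting client: " + ex.getMessage())
                    .build();
        }
    }
}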

Example 2 with JsonParseException

Use of edu.harvard.iq.dataverse.util.json.JsonParseException in the dataverse project by IQSS.

From the class ImportServiceBean, method doImport.

public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException: " + e);
    }
    // convert DTO to Json,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call the JSON parser to read it into a Dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and
        // it's not a protocol we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
Also used: DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) CreateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand)
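
The DTO-to-entity bridge in the middle of doImport is worth isolating: Gson serializes the DatasetDTO to JSON text, and javax.json reads that text back into a JsonObject so the shared JsonParser (which consumes javax.json trees) can be reused. A minimal sketch of that round trip, with a made-up DTO class standing in for DatasetDTO:

import java.io.StringReader;

import javax.json.Json;
import javax.json.JsonObject;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class DtoRoundTrip {

    // Hypothetical stand-in for DatasetDTO; the field name is illustrative.
    static class DemoDTO {
        String title = "Sample dataset";
    }

    public static void main(String[] args) {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        String json = gson.toJson(new DemoDTO());                             // DTO -> JSON text
        JsonObject obj = Json.createReader(new StringReader(json)).readObject(); // JSON text -> javax.json tree
        System.out.println(obj.getString("title"));                          // prints "Sample dataset"
    }
}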

Example 3 with JsonParseException

Use of edu.harvard.iq.dataverse.util.json.JsonParseException in the dataverse project by IQSS.

From the class Datasets, method updateDraftVersion.

@PUT
@Path("{id}/versions/{versionId}")
public Response updateDraftVersion(String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
    if (!":draft".equals(versionId)) {
        return error(Response.Status.BAD_REQUEST, "Only the :draft version can be updated");
    }
    try (StringReader rdr = new StringReader(jsonBody)) {
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        Dataset ds = findDatasetOrDie(id);
        JsonObject json = Json.createReader(rdr).readObject();
        DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
        // clear possibly stale fields from the incoming dataset version.
        // creation and modification dates are updated by the commands.
        incomingVersion.setId(null);
        incomingVersion.setVersionNumber(null);
        incomingVersion.setMinorVersionNumber(null);
        incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
        incomingVersion.setDataset(ds);
        incomingVersion.setCreateTime(null);
        incomingVersion.setLastUpdateTime(null);
        boolean updateDraft = ds.getLatestVersion().isDraft();
        DatasetVersion managedVersion = execCommand(updateDraft ? new UpdateDatasetVersionCommand(req, incomingVersion) : new CreateDatasetVersionCommand(req, ds, incomingVersion));
        return ok(json(managedVersion));
    } catch (JsonParseException ex) {
        logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
        return error(Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage());
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}
Also used: DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) CreateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand) Dataset(edu.harvard.iq.dataverse.Dataset) StringReader(java.io.StringReader) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) UpdateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand) Path(javax.ws.rs.Path) PUT(javax.ws.rs.PUT)
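
The notable step in updateDraftVersion is the scrubbing of the incoming version: every server-owned field (id, version numbers, state, timestamps) is reset so the Update/Create commands remain the only writers of those values. Pulled out as a sketch, using the same setters as the endpoint above:

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;

public class ScrubIncomingVersion {

    // Clears client-supplied values that the server owns; mirrors the
    // "clear possibly stale fields" block in updateDraftVersion above.
    static DatasetVersion scrub(DatasetVersion incoming, Dataset ds) {
        incoming.setId(null);                                        // primary key is assigned on persist
        incoming.setVersionNumber(null);                             // version numbers come from the commands
        incoming.setMinorVersionNumber(null);
        incoming.setVersionState(DatasetVersion.VersionState.DRAFT); // only the draft may be updated here
        incoming.setDataset(ds);
        incoming.setCreateTime(null);                                // timestamps are set by the commands
        incoming.setLastUpdateTime(null);
        return incoming;
    }
}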

Example 4 with JsonParseException

Use of edu.harvard.iq.dataverse.util.json.JsonParseException in the dataverse project by IQSS.

From the class HarvestingServer, method createOaiSet.

/**
 * Creates an OAI set from the spec given in the path, with the remaining
 * parameters taken from the POST body (as JSON): {"name":$set_name,
 * "description":$optional_set_description, "definition":$set_search_query_string}.
 */
@POST
@Path("{specname}")
public Response createOaiSet(String jsonBody, @PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
    /*
     * Authorization modeled after the UI (aka HarvestingSetsPage).
     */
    AuthenticatedUser dvUser;
    try {
        dvUser = findAuthenticatedUserOrDie();
    } catch (WrappedResponse wr) {
        return wr.getResponse();
    }
    if (!dvUser.isSuperuser()) {
        return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.superUser.required"));
    }
    StringReader rdr = new StringReader(jsonBody);
    try (JsonReader jrdr = Json.createReader(rdr)) {
        JsonObject json = jrdr.readObject();
        OAISet set = new OAISet();
        // Validating spec
        if (!StringUtils.isEmpty(spec)) {
            if (spec.length() > 30) {
                return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.sizelimit"));
            }
            if (!Pattern.matches("^[a-zA-Z0-9\\_\\-]+$", spec)) {
                return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.invalid"));
            }
            // If it passes the regex test, check that the spec is not already in use:
            if (oaiSetService.findBySpec(spec) != null) {
                return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.alreadyused"));
            }
        } else {
            return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.required"));
        }
        set.setSpec(spec);
        String name, desc, defn;
        try {
            name = json.getString("name");
        } catch (NullPointerException npe_name) {
            return badRequest(ResourceBundle.getBundle("Bundle").getString("harvestserver.newSetDialog.setspec.required"));
        }
        try {
            defn = json.getString("definition");
        } catch (NullPointerException npe_defn) {
            throw new JsonParseException("definition unspecified");
        }
        try {
            desc = json.getString("description");
        } catch (NullPointerException npe_desc) {
            // treating description as optional
            desc = "";
        }
        set.setName(name);
        set.setDescription(desc);
        set.setDefinition(defn);
        oaiSetService.save(set);
        return created("/harvest/server/oaisets" + spec, oaiSetAsJson(set));
    }
}
Also used: StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonObject(javax.json.JsonObject) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) AuthenticatedUser(edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser) OAISet(edu.harvard.iq.dataverse.harvest.server.OAISet) Path(javax.ws.rs.Path) POST(javax.ws.rs.POST)
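
Per the checks in createOaiSet, "name" and "definition" are required (a missing "definition" raises JsonParseException, a missing "name" returns a 400) while "description" is optional and defaults to the empty string. A sketch of a well-formed request body built with javax.json; the values are invented:

import javax.json.Json;
import javax.json.JsonObject;

public class OaiSetBody {

    public static void main(String[] args) {
        JsonObject body = Json.createObjectBuilder()
                .add("name", "astronomy-datasets")          // required
                .add("description", "Datasets about stars") // optional, defaults to ""
                .add("definition", "subject:astronomy")     // required search query string
                .build();
        System.out.println(body);
    }
}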

Example 5 with JsonParseException

Use of edu.harvard.iq.dataverse.util.json.JsonParseException in the dataverse project by IQSS.

From the class ImportServiceBean, method doImportHarvestedDataset.

@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog) throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        throw new ImportException("importHarvestedDataset called wiht a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;
    DatasetDTO dsDTO = null;
    String json = null;
    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat) || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO:
            // import type should be configurable - it should be possible to
            // select whether you want to harvest with or without files,
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            throw new ImportException("Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            throw new ImportException("Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON.
        // Simply read it into a string, and pass to the final import further down:
        logger.fine("Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }
    if (json == null) {
        if (dsDTO != null) {
            // convert DTO to Json,
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the metadata harvested: " + json);
        } else {
            throw new ImportException("Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call the JSON parser to read it into a Dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        // if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        // throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        // }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                // TODO: Is this scrubbing something we want to continue doing?
                if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                    cleanupLog.println(msg);
                    f.setValue(DatasetField.NA_VALUE);
                }
            }
        }
        // A global id is required for us to be able to harvest and import
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global unique identifier that we could recognize, skipping.");
        }
        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            // If this dataset already exists in a different dataverse,
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId() + " already exists, in the dataverse " + existingDs.getOwner().getAlias() + ", skipping.");
            }
            // If a LOCAL (non-harvested) dataset with this global id already
            // exists in this dataverse, skip it also:
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId() + " already exists in this dataverse; skipping.");
            }
            // We will replace the current version with the imported version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this dataset from the
            // index server:
            indexService.deleteHarvestedDocuments(existingDs);
            // Files from harvested datasets are removed directly through the entity
            // manager, so there is no need to issue a DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
                harvestedFile = null;
            }
            // TODO:
            // Verify what happens with the indexed files in SOLR?
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        } else {
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }
    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        FileOutputStream savedJsonFileStream = new FileOutputStream(new File(metadataFile.getAbsolutePath() + ".json"));
        byte[] jsonBytes = json.getBytes();
        int i = 0;
        while (i < jsonBytes.length) {
            int chunkSize = i + 8192 <= jsonBytes.length ? 8192 : jsonBytes.length - i;
            savedJsonFileStream.write(jsonBytes, i, chunkSize);
            i += chunkSize;
            savedJsonFileStream.flush();
        }
        savedJsonFileStream.close();
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException("Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
Also used: DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DataFile(edu.harvard.iq.dataverse.DataFile) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) IOException(java.io.IOException) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) Dataverse(edu.harvard.iq.dataverse.Dataverse) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) FileOutputStream(java.io.FileOutputStream) File(java.io.File) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand) TransactionAttribute(javax.ejb.TransactionAttribute)
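
When the import fails, the catch block above dumps the offending JSON to a ".json" sidecar file next to the metadata file, hand-rolling an 8 KB chunked write loop over a FileOutputStream. A minimal equivalent under the same assumptions (same sidecar file name), using java.nio instead of the manual loop:

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class SaveFailedJson {

    // Writes the JSON that failed to import next to the original metadata
    // file, as metadataFile.getAbsolutePath() + ".json", in one call.
    static void save(File metadataFile, String json) throws IOException {
        File sidecar = new File(metadataFile.getAbsolutePath() + ".json");
        Files.write(sidecar.toPath(), json.getBytes(StandardCharsets.UTF_8));
    }
}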

Aggregations

JsonParseException (edu.harvard.iq.dataverse.util.json.JsonParseException): 12
StringReader (java.io.StringReader): 11
JsonObject (javax.json.JsonObject): 11
DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion): 6
JsonParser (edu.harvard.iq.dataverse.util.json.JsonParser): 6
JsonReader (javax.json.JsonReader): 6
Path (javax.ws.rs.Path): 6
Gson (com.google.gson.Gson): 5
POST (javax.ws.rs.POST): 5
XMLStreamException (javax.xml.stream.XMLStreamException): 5
Dataset (edu.harvard.iq.dataverse.Dataset): 4
Dataverse (edu.harvard.iq.dataverse.Dataverse): 4
EJBException (javax.ejb.EJBException): 4
AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser): 3
DataverseRequest (edu.harvard.iq.dataverse.engine.command.DataverseRequest): 3
CreateDatasetCommand (edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand): 3
IOException (java.io.IOException): 3
ConstraintViolation (javax.validation.ConstraintViolation): 3
GsonBuilder (com.google.gson.GsonBuilder): 2
DatasetField (edu.harvard.iq.dataverse.DatasetField): 2