Example 36 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

The class Admin, method getDatasetThumbnailMetadata:

/**
 * This method is used in API tests, called from UtilIT.java.
 */
@GET
@Path("datasets/thumbnailMetadata/{id}")
public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) {
    Dataset dataset = datasetSvc.find(idSupplied);
    if (dataset == null) {
        return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
    }
    JsonObjectBuilder data = Json.createObjectBuilder();
    DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail();
    data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail());
    data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset));
    if (datasetThumbnail != null) {
        data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image());
        DataFile dataFile = datasetThumbnail.getDataFile();
        if (dataFile != null) {
            /**
             * @todo Change this from a String to a long.
             */
            data.add("dataFileId", dataFile.getId().toString());
        }
    }
    return ok(data);
}
Also used : DataFile(edu.harvard.iq.dataverse.DataFile) DatasetThumbnail(edu.harvard.iq.dataverse.dataset.DatasetThumbnail) Dataset(edu.harvard.iq.dataverse.Dataset) JsonObjectBuilder(javax.json.JsonObjectBuilder) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)
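
A minimal sketch of how an API test or client might call this admin endpoint and unwrap the response, using the Java 11+ HTTP client. The base URL, the dataset id, the /api/admin mount point, and the {"status": ..., "data": ...} envelope produced by ok(...) are assumptions here; the project's real tests go through the UtilIT helpers instead.

import java.io.StringReader;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;

public class ThumbnailMetadataClientSketch {

    public static void main(String[] args) throws Exception {
        String baseUrl = "http://localhost:8080"; // assumed local Dataverse installation
        long datasetId = 42L;                     // assumed database id of an existing dataset
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(baseUrl + "/api/admin/datasets/thumbnailMetadata/" + datasetId))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        try (JsonReader reader = Json.createReader(new StringReader(response.body()))) {
            // the ok(...) helper wraps the payload in a "data" object
            JsonObject data = reader.readObject().getJsonObject("data");
            System.out.println("isUseGenericThumbnail: " + data.getBoolean("isUseGenericThumbnail"));
            System.out.println("datasetLogoPresent: " + data.getBoolean("datasetLogoPresent"));
        }
    }
}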

Example 37 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

The class ImportServiceBean, method doImport:

public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException" + e);
    }
    // convert the DTO to JSON,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call the JSON parser to read it into a Dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // If the dataset specifies a global id with a protocol we don't support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true, we attempt to fix invalid data;
        // if the fix fails, stop processing this file by throwing an exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) CreateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand)
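
The validate / scrub / re-validate pattern in the middle of doImport can be shown in isolation. This is a minimal sketch: the Field bean and the "N/A" placeholder are invented for illustration, and it only assumes a Bean Validation implementation (such as Hibernate Validator) on the classpath, as the javax.validation imports above do.

import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotNull;

public class ScrubValidationSketch {

    // hypothetical stand-in for a DatasetFieldValue
    static class Field {
        @NotNull
        String value;
        Field(String value) { this.value = value; }
    }

    public static void main(String[] args) {
        Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
        Field field = new Field(null); // invalid: missing value
        Set<ConstraintViolation<Field>> violations = validator.validate(field);
        if (!violations.isEmpty()) {
            // harvest/migration path: substitute a placeholder instead of failing the import
            field.value = "N/A";
            // re-validate to confirm the fix actually took
            if (!validator.validate(field).isEmpty()) {
                throw new IllegalStateException("Could not scrub invalid value");
            }
        }
        System.out.println("value after scrubbing: " + field.value);
    }
}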

Example 38 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

The class ContainerManagerImpl, method replaceMetadata:

@Override
public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    logger.fine("replaceMetadata called with url: " + uri);
    urlManager.processUrl(uri);
    String targetType = urlManager.getTargetType();
    if (!targetType.isEmpty()) {
        logger.fine("operating on target type: " + urlManager.getTargetType());
        if ("dataverse".equals(targetType)) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Metadata replace of dataverse is not supported.");
        } else if ("study".equals(targetType)) {
            logger.fine("replacing metadata for dataset");
            // do a sanity check on the XML received
            try {
                SwordEntry swordEntry = deposit.getSwordEntry();
                logger.fine("deposit XML received by replaceMetadata():\n" + swordEntry);
            } catch (ParseException ex) {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can not replace dataset metadata due to malformed Atom entry: " + ex);
            }
            String globalId = urlManager.getTargetIdentifier();
            Dataset dataset = datasetService.findByGlobalId(globalId);
            if (dataset != null) {
                Dataverse dvThatOwnsDataset = dataset.getOwner();
                UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
                if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
                }
                DatasetVersion datasetVersion = dataset.getEditVersion();
                // erase all existing metadata before populating the dataset version
                List<DatasetField> emptyDatasetFields = new ArrayList<>();
                datasetVersion.setDatasetFields(emptyDatasetFields);
                String foreignFormat = SwordUtil.DCTERMS;
                try {
                    importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, datasetVersion);
                } catch (Exception ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
                }
                swordService.addDatasetContact(datasetVersion, user);
                swordService.addDatasetDepositor(datasetVersion, user);
                swordService.addDatasetSubjectIfMissing(datasetVersion);
                swordService.setDatasetLicenseAndTermsOfUse(datasetVersion, deposit.getSwordEntry());
                try {
                    engineSvc.submit(updateDatasetCommand);
                } catch (CommandException ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem updating dataset: " + ex);
                }
                ReceiptGenerator receiptGenerator = new ReceiptGenerator();
                String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
                DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
                return depositReceipt;
            } else {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on global id (" + globalId + ") in URL: " + uri);
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unknown target type specified on which to replace metadata: " + uri);
        }
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target specified on which to replace metadata: " + uri);
    }
}
Also used : SwordError(org.swordapp.server.SwordError) Dataset(edu.harvard.iq.dataverse.Dataset) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) AuthenticatedUser(edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser) Dataverse(edu.harvard.iq.dataverse.Dataverse) SwordAuthException(org.swordapp.server.SwordAuthException) SwordServerException(org.swordapp.server.SwordServerException) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) EJBException(javax.ejb.EJBException) CommandExecutionException(edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException) ParseException(org.apache.abdera.parser.ParseException) DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) DepositReceipt(org.swordapp.server.DepositReceipt) SwordEntry(org.swordapp.server.SwordEntry) ArrayList(java.util.ArrayList) List(java.util.List) ParseException(org.apache.abdera.parser.ParseException) UpdateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand)
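
For context, the deposit consumed by replaceMetadata is an Atom entry carrying DCTerms metadata (foreignFormat is SwordUtil.DCTERMS). Below is a minimal sketch of building such an entry with Apache Abdera, the same library the example's ParseException comes from; the particular dcterms fields are illustrative assumptions, not the full set Dataverse maps.

import javax.xml.namespace.QName;
import org.apache.abdera.Abdera;
import org.apache.abdera.model.Entry;

public class DctermsEntrySketch {

    public static void main(String[] args) throws Exception {
        String dcterms = "http://purl.org/dc/terms/";
        Entry entry = new Abdera().newEntry();
        entry.setTitle("Replacement Title");
        // illustrative DCTerms elements that a SWORD client could PUT to the dataset's edit URI
        entry.addSimpleExtension(new QName(dcterms, "creator", "dcterms"), "Doe, Jane");
        entry.addSimpleExtension(new QName(dcterms, "description", "dcterms"), "Updated description.");
        entry.addSimpleExtension(new QName(dcterms, "subject", "dcterms"), "Other");
        entry.writeTo(System.out);
    }
}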

Example 39 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

The class JsonPrinterTest, method testJson_PrivateUrl:

@Test
public void testJson_PrivateUrl() {
    DataverseRole aRole = new DataverseRole();
    PrivateUrlUser privateUrlUserIn = new PrivateUrlUser(42);
    RoleAssignee anAssignee = privateUrlUserIn;
    Dataset dataset = new Dataset();
    String privateUrlToken = "e1d53cf6-794a-457a-9709-7c07629a8267";
    RoleAssignment ra = new RoleAssignment(aRole, anAssignee, dataset, privateUrlToken);
    String dataverseSiteUrl = "https://dataverse.example.edu";
    PrivateUrl privateUrl = new PrivateUrl(ra, dataset, dataverseSiteUrl);
    JsonObjectBuilder job = JsonPrinter.json(privateUrl);
    assertNotNull(job);
    JsonObject jsonObject = job.build();
    assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token"));
    assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link"));
    assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken"));
    assertEquals("#42", jsonObject.getJsonObject("roleAssignment").getString("assignee"));
}
Also used : PrivateUrl(edu.harvard.iq.dataverse.privateurl.PrivateUrl) Dataset(edu.harvard.iq.dataverse.Dataset) PrivateUrlUser(edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser) RoleAssignment(edu.harvard.iq.dataverse.RoleAssignment) JsonObject(javax.json.JsonObject) JsonObjectBuilder(javax.json.JsonObjectBuilder) DataverseRole(edu.harvard.iq.dataverse.authorization.DataverseRole) RoleAssignee(edu.harvard.iq.dataverse.authorization.RoleAssignee) Test(org.junit.Test)
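
If the actual JSON text is needed rather than assertions against the built object, the JsonObjectBuilder returned by JsonPrinter.json(...) can be written out with the javax.json writer API. A minimal sketch, assuming any builder like the job variable in the test above:

import java.io.StringWriter;
import java.util.Collections;
import java.util.Map;
import javax.json.Json;
import javax.json.JsonObjectBuilder;
import javax.json.JsonWriter;
import javax.json.stream.JsonGenerator;

public class JsonPrettyPrintSketch {

    static String toPrettyString(JsonObjectBuilder job) {
        Map<String, Object> config = Collections.singletonMap(JsonGenerator.PRETTY_PRINTING, true);
        StringWriter out = new StringWriter();
        try (JsonWriter writer = Json.createWriterFactory(config).createWriter(out)) {
            // serialize the built JsonObject with pretty printing
            writer.writeObject(job.build());
        }
        return out.toString();
    }
}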

Example 40 with Dataset

use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.

The class JsonParserTest, method testparseFiles:

@Test
public void testparseFiles() throws JsonParseException {
    JsonArrayBuilder metadatasJsonBuilder = Json.createArrayBuilder();
    JsonObjectBuilder fileMetadataGood = Json.createObjectBuilder();
    fileMetadataGood.add("label", "myLabel");
    JsonObjectBuilder fileGood = Json.createObjectBuilder();
    fileMetadataGood.add("dataFile", fileGood);
    fileMetadataGood.add("categories", Json.createArrayBuilder().add("Documentation"));
    JsonObjectBuilder fileMetadataBad = Json.createObjectBuilder();
    fileMetadataBad.add("label", "bad");
    JsonObjectBuilder fileBad = Json.createObjectBuilder();
    fileMetadataBad.add("dataFile", fileBad);
    fileMetadataBad.add("categories", Json.createArrayBuilder().add(BigDecimal.ONE));
    metadatasJsonBuilder.add(fileMetadataGood);
    metadatasJsonBuilder.add(fileMetadataBad);
    JsonArray metadatasJson = metadatasJsonBuilder.build();
    DatasetVersion dsv = new DatasetVersion();
    Dataset dataset = new Dataset();
    dsv.setDataset(dataset);
    List<FileMetadata> fileMetadatas = new JsonParser().parseFiles(metadatasJson, dsv);
    System.out.println("fileMetadatas: " + fileMetadatas);
    assertEquals("myLabel", fileMetadatas.get(0).getLabel());
    assertEquals("Documentation", fileMetadatas.get(0).getCategories().get(0).getName());
    assertEquals(null, fileMetadatas.get(1).getCategories());
    List<FileMetadata> codeCoverage = new JsonParser().parseFiles(Json.createArrayBuilder().add(Json.createObjectBuilder().add("label", "myLabel").add("dataFile", Json.createObjectBuilder().add("categories", JsonValue.NULL))).build(), dsv);
    assertEquals(null, codeCoverage.get(0).getCategories());
}
Also used : JsonArray(javax.json.JsonArray) Dataset(edu.harvard.iq.dataverse.Dataset) FileMetadata(edu.harvard.iq.dataverse.FileMetadata) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonArrayBuilder(javax.json.JsonArrayBuilder) JsonObjectBuilder(javax.json.JsonObjectBuilder) Test(org.junit.Test)
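
The file-metadata array the test assembles with builders can equally come from JSON text, which is how JsonParser normally receives it from the API layer. A minimal sketch of the "good" entry above read back with javax.json; the literal is an assumed equivalent of the builder output, not taken from the project:

import java.io.StringReader;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonReader;

public class ParseFilesInputSketch {

    public static void main(String[] args) {
        String jsonText = "[{\"label\":\"myLabel\",\"dataFile\":{},\"categories\":[\"Documentation\"]}]";
        try (JsonReader reader = Json.createReader(new StringReader(jsonText))) {
            JsonArray filesJson = reader.readArray();
            // filesJson mirrors the fileMetadataGood entry built in the test and
            // could be handed to parseFiles(filesJson, dsv) in the same way
            System.out.println(filesJson);
        }
    }
}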

Aggregations

Dataset (edu.harvard.iq.dataverse.Dataset): 142
Test (org.junit.Test): 53
DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion): 44
DataFile (edu.harvard.iq.dataverse.DataFile): 41
Dataverse (edu.harvard.iq.dataverse.Dataverse): 34
AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser): 27
CommandException (edu.harvard.iq.dataverse.engine.command.exception.CommandException): 23
IOException (java.io.IOException): 22
ArrayList (java.util.ArrayList): 22
Path (javax.ws.rs.Path): 22
FileMetadata (edu.harvard.iq.dataverse.FileMetadata): 17
DataverseRole (edu.harvard.iq.dataverse.authorization.DataverseRole): 17
RoleAssignment (edu.harvard.iq.dataverse.RoleAssignment): 16
DataverseRequest (edu.harvard.iq.dataverse.engine.command.DataverseRequest): 16
Date (java.util.Date): 16
JsonObjectBuilder (javax.json.JsonObjectBuilder): 16
JsonObject (javax.json.JsonObject): 15
DvObject (edu.harvard.iq.dataverse.DvObject): 14
RoleAssignee (edu.harvard.iq.dataverse.authorization.RoleAssignee): 14
PrivateUrlUser (edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser): 14