
Example 1 with DatasetField

Use of edu.harvard.iq.dataverse.DatasetField in project dataverse by IQSS, from class MocksFactory, method makeDatasetVersion.

public static DatasetVersion makeDatasetVersion(List<DataFileCategory> categories) {
    final DatasetVersion retVal = new DatasetVersion();
    final List<DataFile> files = makeFiles(10);
    final List<FileMetadata> metadatas = new ArrayList<>(10);
    Random rand = new Random();
    files.forEach(df -> {
        df.getFileMetadata().addCategory(categories.get(rand.nextInt(categories.size())));
        metadatas.add(df.getFileMetadata());
    });
    retVal.setFileMetadatas(metadatas);
    List<DatasetField> fields = new ArrayList<>();
    DatasetField field = new DatasetField();
    field.setId(nextId());
    field.setSingleValue("Sample Field Value");
    field.setDatasetFieldType(makeDatasetFieldType());
    fields.add(field);
    retVal.setDatasetFields(fields);
    return retVal;
}
Also used : DataFile(edu.harvard.iq.dataverse.DataFile) Random(java.util.Random) DatasetField(edu.harvard.iq.dataverse.DatasetField) FileMetadata(edu.harvard.iq.dataverse.FileMetadata) ArrayList(java.util.ArrayList) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion)
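
As a usage note, a minimal sketch of a caller follows. It is not from the Dataverse test suite: the wrapper class, the "Documentation" category name, the MocksFactory package in the import, and the setName call are assumptions; the getters simply mirror the setters used above.

import edu.harvard.iq.dataverse.DataFileCategory;
import edu.harvard.iq.dataverse.DatasetVersion;
// The factory's package is assumed here; adjust to wherever MocksFactory lives in the test sources.
import edu.harvard.iq.dataverse.mocks.MocksFactory;
import java.util.Collections;
import java.util.List;

public class MakeDatasetVersionSketch {
    public static void main(String[] args) {
        // The category list must be non-empty: makeDatasetVersion draws a random
        // category for each of the ten mocked files.
        DataFileCategory docs = new DataFileCategory();
        // setName is assumed to exist on DataFileCategory.
        docs.setName("Documentation");
        List<DataFileCategory> categories = Collections.singletonList(docs);

        DatasetVersion version = MocksFactory.makeDatasetVersion(categories);
        System.out.println("file metadatas: " + version.getFileMetadatas().size()); // expected: 10
        System.out.println("dataset fields: " + version.getDatasetFields().size()); // expected: 1
    }
}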

Example 2 with DatasetField

Use of edu.harvard.iq.dataverse.DatasetField in project dataverse by IQSS, from class MocksFactory, method makeDataset.

public static Dataset makeDataset() {
    Dataset ds = new Dataset();
    ds.setId(nextId());
    ds.setIdentifier("sample-ds-" + ds.getId());
    ds.setCategoriesByName(Arrays.asList("CatOne", "CatTwo", "CatThree"));
    final List<DataFile> files = makeFiles(10);
    final List<FileMetadata> metadatas = new ArrayList<>(10);
    final List<DataFileCategory> categories = ds.getCategories();
    Random rand = new Random();
    files.forEach(df -> {
        df.getFileMetadata().addCategory(categories.get(rand.nextInt(categories.size())));
        metadatas.add(df.getFileMetadata());
    });
    ds.setFiles(files);
    final DatasetVersion initialVersion = ds.getVersions().get(0);
    initialVersion.setFileMetadatas(metadatas);
    List<DatasetField> fields = new ArrayList<>();
    DatasetField field = new DatasetField();
    field.setId(nextId());
    field.setSingleValue("Sample Field Value");
    field.setDatasetFieldType(makeDatasetFieldType());
    fields.add(field);
    initialVersion.setDatasetFields(fields);
    ds.setOwner(makeDataverse());
    return ds;
}
Also used : DataFile(edu.harvard.iq.dataverse.DataFile) DataFileCategory(edu.harvard.iq.dataverse.DataFileCategory) Random(java.util.Random) DatasetField(edu.harvard.iq.dataverse.DatasetField) Dataset(edu.harvard.iq.dataverse.Dataset) FileMetadata(edu.harvard.iq.dataverse.FileMetadata) ArrayList(java.util.ArrayList) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion)
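
A companion sketch, also not from the project, shows what a test can rely on after calling makeDataset(): an owner dataverse, ten files, and one populated field on the initial version. The test class, assertion messages, and MocksFactory import are illustrative assumptions; the getters mirror the setters used above.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
// The factory's package is assumed, as in the previous sketch.
import edu.harvard.iq.dataverse.mocks.MocksFactory;
import org.junit.Test;

public class MakeDatasetSketchTest {

    @Test
    public void makeDatasetPopulatesInitialVersion() {
        Dataset ds = MocksFactory.makeDataset();

        assertNotNull("the mock wires up an owner dataverse", ds.getOwner());
        assertEquals("ten mocked files are attached", 10, ds.getFiles().size());

        DatasetVersion initial = ds.getVersions().get(0);
        assertEquals("each file contributes one FileMetadata", 10, initial.getFileMetadatas().size());
        assertEquals("one sample field is set on the initial version", 1, initial.getDatasetFields().size());
    }
}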

Example 3 with DatasetField

Use of edu.harvard.iq.dataverse.DatasetField in project dataverse by IQSS, from class ImportServiceBean, method doImport.

public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException: " + e);
    }
    // convert DTO to Json,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if a global id was supplied and its protocol is not one we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error executing Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error executing dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) CreateDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand) Gson(com.google.gson.Gson) JsonObject(javax.json.JsonObject) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JsonParseException(edu.harvard.iq.dataverse.util.json.JsonParseException) DatasetDTO(edu.harvard.iq.dataverse.api.dto.DatasetDTO) DatasetFieldValue(edu.harvard.iq.dataverse.DatasetFieldValue) StringReader(java.io.StringReader) JsonReader(javax.json.JsonReader) JsonParser(edu.harvard.iq.dataverse.util.json.JsonParser) CreateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand) ValidatorFactory(javax.validation.ValidatorFactory) GsonBuilder(com.google.gson.GsonBuilder) Dataset(edu.harvard.iq.dataverse.Dataset) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) XMLStreamException(javax.xml.stream.XMLStreamException) ConstraintViolation(javax.validation.ConstraintViolation) Validator(javax.validation.Validator) DestroyDatasetCommand(edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand)
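
The HARVEST branch of the validation loop above reduces to one pattern: run bean validation on the version and overwrite any invalid DatasetFieldValue with DatasetField.NA_VALUE, logging what changed. Below is a hedged sketch of just that step, pulled out of the service bean; the helper method itself is hypothetical, while the calls mirror the quoted code and reuse its imports.

private void scrubInvalidValuesForHarvest(DatasetVersion version, PrintWriter cleanupLog, String fileName) {
    // validate() returns the bean-validation violations for the whole version,
    // exactly as in doImport above.
    Set<ConstraintViolation> violations = version.validate();
    for (ConstraintViolation<DatasetFieldValue> v : violations) {
        DatasetFieldValue value = v.getRootBean();
        cleanupLog.println("Data modified - File: " + fileName
                + "; Field: " + value.getDatasetField().getDatasetFieldType().getDisplayName()
                + "; Invalid value: '" + value.getValue() + "'"
                + " Converted Value: '" + DatasetField.NA_VALUE + "'");
        // Replace the offending value with the sentinel so the harvested record can still be saved.
        value.setValue(DatasetField.NA_VALUE);
    }
}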

Example 4 with DatasetField

Use of edu.harvard.iq.dataverse.DatasetField in project dataverse by IQSS, from class SwordServiceBean, method addDatasetDepositor.

/**
 * Mutate the dataset version, adding a depositor for the dataset.
 */
public void addDatasetDepositor(DatasetVersion newDatasetVersion, User user) {
    if (!user.isAuthenticated()) {
        logger.info("returning early since user is not authenticated");
        return;
    }
    AuthenticatedUser au = (AuthenticatedUser) user;
    DatasetFieldType depositorDatasetFieldType = datasetFieldService.findByNameOpt(DatasetFieldConstant.depositor);
    DatasetField depositorDatasetField = DatasetField.createNewEmptyDatasetField(depositorDatasetFieldType, newDatasetVersion);
    depositorDatasetField.setSingleValue(au.getLastName() + ", " + au.getFirstName());
    newDatasetVersion.getDatasetFields().add(depositorDatasetField);
}
Also used : DatasetField(edu.harvard.iq.dataverse.DatasetField) AuthenticatedUser(edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser) DatasetFieldType(edu.harvard.iq.dataverse.DatasetFieldType)
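
The same three steps apply to any primitive field, not just the depositor: look up the DatasetFieldType, create an empty field bound to the version, then set a single value. A hedged generalization follows; the helper itself is hypothetical and assumes the same injected datasetFieldService and logger used above.

private void addSingleValueField(DatasetVersion version, String fieldTypeName, String value) {
    DatasetFieldType fieldType = datasetFieldService.findByNameOpt(fieldTypeName);
    if (fieldType == null) {
        logger.info("unknown dataset field type '" + fieldTypeName + "', skipping");
        return;
    }
    DatasetField field = DatasetField.createNewEmptyDatasetField(fieldType, version);
    field.setSingleValue(value);
    version.getDatasetFields().add(field);
}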

Example 5 with DatasetField

Use of edu.harvard.iq.dataverse.DatasetField in project dataverse by IQSS, from class JsonPrinterTest, method testDatasetContactOutOfBoxNoPrivacy.

@Test
public void testDatasetContactOutOfBoxNoPrivacy() {
    MetadataBlock block = new MetadataBlock();
    block.setName("citation");
    List<DatasetField> fields = new ArrayList<>();
    DatasetField datasetContactField = new DatasetField();
    DatasetFieldType datasetContactDatasetFieldType = datasetFieldTypeSvc.findByName("datasetContact");
    datasetContactDatasetFieldType.setMetadataBlock(block);
    datasetContactField.setDatasetFieldType(datasetContactDatasetFieldType);
    List<DatasetFieldCompoundValue> vals = new LinkedList<>();
    DatasetFieldCompoundValue val = new DatasetFieldCompoundValue();
    val.setParentDatasetField(datasetContactField);
    val.setChildDatasetFields(Arrays.asList(constructPrimitive("datasetContactEmail", "foo@bar.com"), constructPrimitive("datasetContactName", "Foo Bar"), constructPrimitive("datasetContactAffiliation", "Bar University")));
    vals.add(val);
    datasetContactField.setDatasetFieldCompoundValues(vals);
    fields.add(datasetContactField);
    SettingsServiceBean nullServiceBean = null;
    JsonPrinter jsonPrinter = new JsonPrinter(nullServiceBean);
    JsonObject jsonObject = jsonPrinter.json(block, fields).build();
    assertNotNull(jsonObject);
    System.out.println("json: " + JsonUtil.prettyPrint(jsonObject.toString()));
    assertEquals("Foo Bar", jsonObject.getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactName").getString("value"));
    assertEquals("Bar University", jsonObject.getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactAffiliation").getString("value"));
    assertEquals("foo@bar.com", jsonObject.getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactEmail").getString("value"));
    JsonObject byBlocks = jsonPrinter.jsonByBlocks(fields).build();
    System.out.println("byBlocks: " + JsonUtil.prettyPrint(byBlocks.toString()));
    assertEquals("Foo Bar", byBlocks.getJsonObject("citation").getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactName").getString("value"));
    assertEquals("Bar University", byBlocks.getJsonObject("citation").getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactAffiliation").getString("value"));
    assertEquals("foo@bar.com", byBlocks.getJsonObject("citation").getJsonArray("fields").getJsonObject(0).getJsonArray("value").getJsonObject(0).getJsonObject("datasetContactEmail").getString("value"));
}
Also used : MetadataBlock(edu.harvard.iq.dataverse.MetadataBlock) DatasetField(edu.harvard.iq.dataverse.DatasetField) ArrayList(java.util.ArrayList) JsonObject(javax.json.JsonObject) DatasetFieldType(edu.harvard.iq.dataverse.DatasetFieldType) DatasetFieldCompoundValue(edu.harvard.iq.dataverse.DatasetFieldCompoundValue) LinkedList(java.util.LinkedList) SettingsServiceBean(edu.harvard.iq.dataverse.settings.SettingsServiceBean) Test(org.junit.Test)
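
The long assertion chains are easier to follow when decomposed. A hedged restatement of the first assertion, with illustrative local variable names, purely re-reads the test code above.

// Same navigation as the first assertEquals in the test, broken into steps.
JsonObject firstField = jsonObject.getJsonArray("fields").getJsonObject(0);
JsonObject firstCompoundValue = firstField.getJsonArray("value").getJsonObject(0);
String contactName = firstCompoundValue.getJsonObject("datasetContactName").getString("value");
assertEquals("Foo Bar", contactName);
// jsonByBlocks(fields) wraps the same structure under a "citation" key, which is why
// the later assertions add one extra getJsonObject("citation") step.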

Aggregations

DatasetField (edu.harvard.iq.dataverse.DatasetField) 32
DatasetFieldType (edu.harvard.iq.dataverse.DatasetFieldType) 16
JsonObject (javax.json.JsonObject) 13
DatasetFieldCompoundValue (edu.harvard.iq.dataverse.DatasetFieldCompoundValue) 11
DatasetFieldValue (edu.harvard.iq.dataverse.DatasetFieldValue) 11
ArrayList (java.util.ArrayList) 11
Test (org.junit.Test) 8
ControlledVocabularyValue (edu.harvard.iq.dataverse.ControlledVocabularyValue) 7
DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion) 7
LinkedList (java.util.LinkedList) 7
Dataset (edu.harvard.iq.dataverse.Dataset) 6
DataFile (edu.harvard.iq.dataverse.DataFile) 4
FileMetadata (edu.harvard.iq.dataverse.FileMetadata) 4
ConstraintViolation (javax.validation.ConstraintViolation) 4
Gson (com.google.gson.Gson) 3
Dataverse (edu.harvard.iq.dataverse.Dataverse) 3
MetadataBlock (edu.harvard.iq.dataverse.MetadataBlock) 3
StringReader (java.io.StringReader) 3
Timestamp (java.sql.Timestamp) 3
Date (java.util.Date) 3