Use of edu.harvard.iq.dataverse.DatasetFieldType in project dataverse by IQSS.
From the class JsonParserTest, the method testControlledVocalNoRepeatsRoundTrip:
@Test
public void testControlledVocalNoRepeatsRoundTrip() throws JsonParseException {
    DatasetField expected = new DatasetField();
    DatasetFieldType fieldType = datasetFieldTypeSvc.findByName("publicationIdType");
    expected.setDatasetFieldType(fieldType);
    expected.setControlledVocabularyValues(Collections.singletonList(fieldType.getControlledVocabularyValue("ark")));
    JsonObject json = JsonPrinter.json(expected);
    DatasetField actual = sut.parseField(json);
    assertFieldsEqual(expected, actual);
}
Use of edu.harvard.iq.dataverse.DatasetFieldType in project dataverse by IQSS.
From the class DDIExporterTest, the method setUp:
@Before
public void setUp() {
    datasetFieldTypeSvc = new MockDatasetFieldSvc();

    DatasetFieldType titleType = datasetFieldTypeSvc.add(new DatasetFieldType("title", FieldType.TEXTBOX, false));

    DatasetFieldType authorType = datasetFieldTypeSvc.add(new DatasetFieldType("author", FieldType.TEXT, true));
    Set<DatasetFieldType> authorChildTypes = new HashSet<>();
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorName", FieldType.TEXT, false)));
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorAffiliation", FieldType.TEXT, false)));
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorIdentifier", FieldType.TEXT, false)));
    DatasetFieldType authorIdentifierSchemeType = datasetFieldTypeSvc.add(new DatasetFieldType("authorIdentifierScheme", FieldType.TEXT, false));
    authorIdentifierSchemeType.setAllowControlledVocabulary(true);
    // Why aren't these enforced? Should be ORCID, etc.
    authorIdentifierSchemeType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "ark", authorIdentifierSchemeType),
            new ControlledVocabularyValue(2L, "doi", authorIdentifierSchemeType),
            new ControlledVocabularyValue(3L, "url", authorIdentifierSchemeType)));
    authorChildTypes.add(datasetFieldTypeSvc.add(authorIdentifierSchemeType));
    for (DatasetFieldType t : authorChildTypes) {
        t.setParentDatasetFieldType(authorType);
    }
    authorType.setChildDatasetFieldTypes(authorChildTypes);

    DatasetFieldType datasetContactType = datasetFieldTypeSvc.add(new DatasetFieldType("datasetContact", FieldType.TEXT, true));
    Set<DatasetFieldType> datasetContactTypes = new HashSet<>();
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactEmail", FieldType.TEXT, false)));
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactName", FieldType.TEXT, false)));
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactAffiliation", FieldType.TEXT, false)));
    for (DatasetFieldType t : datasetContactTypes) {
        t.setParentDatasetFieldType(datasetContactType);
    }
    datasetContactType.setChildDatasetFieldTypes(datasetContactTypes);

    DatasetFieldType dsDescriptionType = datasetFieldTypeSvc.add(new DatasetFieldType("dsDescription", FieldType.TEXT, true));
    Set<DatasetFieldType> dsDescriptionTypes = new HashSet<>();
    dsDescriptionTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("dsDescriptionValue", FieldType.TEXT, false)));
    for (DatasetFieldType t : dsDescriptionTypes) {
        t.setParentDatasetFieldType(dsDescriptionType);
    }
    dsDescriptionType.setChildDatasetFieldTypes(dsDescriptionTypes);

    DatasetFieldType keywordType = datasetFieldTypeSvc.add(new DatasetFieldType("keyword", DatasetFieldType.FieldType.TEXT, true));
    DatasetFieldType descriptionType = datasetFieldTypeSvc.add(new DatasetFieldType("description", DatasetFieldType.FieldType.TEXTBOX, false));

    DatasetFieldType subjectType = datasetFieldTypeSvc.add(new DatasetFieldType("subject", DatasetFieldType.FieldType.TEXT, true));
    subjectType.setAllowControlledVocabulary(true);
    subjectType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "mgmt", subjectType),
            new ControlledVocabularyValue(2L, "law", subjectType),
            new ControlledVocabularyValue(3L, "cs", subjectType)));

    DatasetFieldType pubIdType = datasetFieldTypeSvc.add(new DatasetFieldType("publicationIdType", DatasetFieldType.FieldType.TEXT, false));
    pubIdType.setAllowControlledVocabulary(true);
    pubIdType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "ark", pubIdType),
            new ControlledVocabularyValue(2L, "doi", pubIdType),
            new ControlledVocabularyValue(3L, "url", pubIdType)));

    DatasetFieldType compoundSingleType = datasetFieldTypeSvc.add(new DatasetFieldType("coordinate", DatasetFieldType.FieldType.TEXT, true));
    Set<DatasetFieldType> childTypes = new HashSet<>();
    childTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("lat", DatasetFieldType.FieldType.TEXT, false)));
    childTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("lon", DatasetFieldType.FieldType.TEXT, false)));
    for (DatasetFieldType t : childTypes) {
        t.setParentDatasetFieldType(compoundSingleType);
    }
    compoundSingleType.setChildDatasetFieldTypes(childTypes);
}
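A minimal sketch (not part of the original test) of how a test body might consult this mock once setUp has run; it relies only on lookups already used elsewhere on this page, findByName and getControlledVocabularyValue, plus standard JUnit assertions. The "subject" type and its "mgmt"/"law" terms are the fixtures registered above.

    // Hedged illustration: resolve a fixture registered in setUp() and two of its
    // controlled vocabulary terms; the names echo the values added in the setup code.
    DatasetFieldType subject = datasetFieldTypeSvc.findByName("subject");
    assertNotNull(subject);
    assertNotNull(subject.getControlledVocabularyValue("mgmt"));
    assertNotNull(subject.getControlledVocabularyValue("law"));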
Use of edu.harvard.iq.dataverse.DatasetFieldType in project dataverse by IQSS.
From the class SchemaDotOrgExporterTest, the method setUp:
@Before
public void setUp() {
    datasetFieldTypeSvc = new DDIExporterTest.MockDatasetFieldSvc();

    DatasetFieldType titleType = datasetFieldTypeSvc.add(new DatasetFieldType("title", DatasetFieldType.FieldType.TEXTBOX, false));

    DatasetFieldType authorType = datasetFieldTypeSvc.add(new DatasetFieldType("author", DatasetFieldType.FieldType.TEXT, true));
    Set<DatasetFieldType> authorChildTypes = new HashSet<>();
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorName", DatasetFieldType.FieldType.TEXT, false)));
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorAffiliation", DatasetFieldType.FieldType.TEXT, false)));
    authorChildTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("authorIdentifier", DatasetFieldType.FieldType.TEXT, false)));
    DatasetFieldType authorIdentifierSchemeType = datasetFieldTypeSvc.add(new DatasetFieldType("authorIdentifierScheme", DatasetFieldType.FieldType.TEXT, false));
    authorIdentifierSchemeType.setAllowControlledVocabulary(true);
    // Why aren't these enforced? Should be ORCID, etc.
    authorIdentifierSchemeType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "ark", authorIdentifierSchemeType),
            new ControlledVocabularyValue(2L, "doi", authorIdentifierSchemeType),
            new ControlledVocabularyValue(3L, "url", authorIdentifierSchemeType)));
    authorChildTypes.add(datasetFieldTypeSvc.add(authorIdentifierSchemeType));
    for (DatasetFieldType t : authorChildTypes) {
        t.setParentDatasetFieldType(authorType);
    }
    authorType.setChildDatasetFieldTypes(authorChildTypes);

    DatasetFieldType datasetContactType = datasetFieldTypeSvc.add(new DatasetFieldType("datasetContact", DatasetFieldType.FieldType.TEXT, true));
    Set<DatasetFieldType> datasetContactTypes = new HashSet<>();
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactEmail", DatasetFieldType.FieldType.TEXT, false)));
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactName", DatasetFieldType.FieldType.TEXT, false)));
    datasetContactTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("datasetContactAffiliation", DatasetFieldType.FieldType.TEXT, false)));
    for (DatasetFieldType t : datasetContactTypes) {
        t.setParentDatasetFieldType(datasetContactType);
    }
    datasetContactType.setChildDatasetFieldTypes(datasetContactTypes);

    DatasetFieldType dsDescriptionType = datasetFieldTypeSvc.add(new DatasetFieldType("dsDescription", DatasetFieldType.FieldType.TEXT, true));
    Set<DatasetFieldType> dsDescriptionTypes = new HashSet<>();
    dsDescriptionTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("dsDescriptionValue", DatasetFieldType.FieldType.TEXT, false)));
    for (DatasetFieldType t : dsDescriptionTypes) {
        t.setParentDatasetFieldType(dsDescriptionType);
    }
    dsDescriptionType.setChildDatasetFieldTypes(dsDescriptionTypes);

    DatasetFieldType keywordType = datasetFieldTypeSvc.add(new DatasetFieldType("keyword", DatasetFieldType.FieldType.TEXT, true));
    DatasetFieldType descriptionType = datasetFieldTypeSvc.add(new DatasetFieldType("description", DatasetFieldType.FieldType.TEXTBOX, false));

    DatasetFieldType subjectType = datasetFieldTypeSvc.add(new DatasetFieldType("subject", DatasetFieldType.FieldType.TEXT, true));
    subjectType.setAllowControlledVocabulary(true);
    subjectType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "mgmt", subjectType),
            new ControlledVocabularyValue(2L, "law", subjectType),
            new ControlledVocabularyValue(3L, "cs", subjectType)));

    DatasetFieldType pubIdType = datasetFieldTypeSvc.add(new DatasetFieldType("publicationIdType", DatasetFieldType.FieldType.TEXT, false));
    pubIdType.setAllowControlledVocabulary(true);
    pubIdType.setControlledVocabularyValues(Arrays.asList(
            new ControlledVocabularyValue(1L, "ark", pubIdType),
            new ControlledVocabularyValue(2L, "doi", pubIdType),
            new ControlledVocabularyValue(3L, "url", pubIdType)));

    DatasetFieldType compoundSingleType = datasetFieldTypeSvc.add(new DatasetFieldType("coordinate", DatasetFieldType.FieldType.TEXT, true));
    Set<DatasetFieldType> childTypes = new HashSet<>();
    childTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("lat", DatasetFieldType.FieldType.TEXT, false)));
    childTypes.add(datasetFieldTypeSvc.add(new DatasetFieldType("lon", DatasetFieldType.FieldType.TEXT, false)));
    for (DatasetFieldType t : childTypes) {
        t.setParentDatasetFieldType(compoundSingleType);
    }
    compoundSingleType.setChildDatasetFieldTypes(childTypes);
}
Use of edu.harvard.iq.dataverse.DatasetFieldType in project dataverse by IQSS.
From the class CreateDataverseCommandTest, the method testCustomOptions:
@Test
public void testCustomOptions() throws CommandException {
    Dataverse dv = makeDataverse();
    Timestamp creation = timestamp(1990, 12, 12);
    AuthenticatedUser creator = makeAuthenticatedUser("Joe", "Walsh");

    dv.setCreateDate(creation);
    dv.setId(null);
    dv.setCreator(creator);
    dv.setDefaultContributorRole(null);
    dv.setOwner(makeDataverse());
    dv.setDataverseType(Dataverse.DataverseType.JOURNALS);
    dv.setDefaultContributorRole(roles.findBuiltinRoleByAlias(DataverseRole.MANAGER));

    final DataverseRequest request = makeRequest();
    List<DatasetFieldType> expectedFacets = Arrays.asList(makeDatasetFieldType(), makeDatasetFieldType(), makeDatasetFieldType());
    List<DataverseFieldTypeInputLevel> dftils = Arrays.asList(
            makeDataverseFieldTypeInputLevel(makeDatasetFieldType()),
            makeDataverseFieldTypeInputLevel(makeDatasetFieldType()),
            makeDataverseFieldTypeInputLevel(makeDatasetFieldType()));

    CreateDataverseCommand sut = new CreateDataverseCommand(dv, request, new LinkedList<>(expectedFacets), new LinkedList<>(dftils));
    Dataverse result = engine.submit(sut);

    assertEquals(creation, result.getCreateDate());
    assertNotNull(result.getId());
    assertEquals(creator, result.getCreator());
    assertEquals(Dataverse.DataverseType.JOURNALS, result.getDataverseType());
    assertEquals(roles.findBuiltinRoleByAlias(DataverseRole.MANAGER), result.getDefaultContributorRole());

    // Assert that the creator is admin.
    final RoleAssignment roleAssignment = roles.directRoleAssignments(dv).get(0);
    assertEquals(roles.findBuiltinRoleByAlias(DataverseRole.ADMIN), roleAssignment.getRole());
    assertEquals(dv, roleAssignment.getDefinitionPoint());
    assertEquals(roleAssignment.getAssigneeIdentifier(), request.getUser().getIdentifier());

    assertTrue(result.isPermissionRoot());
    assertTrue(result.isThemeRoot());
    assertTrue(indexCalled);

    assertTrue(facetsDeleted);
    int i = 0;
    for (DataverseFacet df : createdFacets) {
        assertEquals(i, df.getDisplayOrder());
        assertEquals(result, df.getDataverse());
        assertEquals(expectedFacets.get(i), df.getDatasetFieldType());
        i++;
    }

    assertTrue(dftilsDeleted);
    for (DataverseFieldTypeInputLevel dftil : createdDftils) {
        assertEquals(result, dftil.getDataverse());
    }
}
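The helpers makeDataverse, makeRequest, makeAuthenticatedUser, makeDatasetFieldType, and makeDataverseFieldTypeInputLevel are assumed to come from the project's test mock factory and are not shown on this page. Purely as an illustration of the kind of fixture such a helper could return (the real implementation may differ), a makeDatasetFieldType-style method might look like this:

    // Hypothetical sketch of a mock-factory helper; the name pattern and id handling
    // are assumptions. Requires java.util.concurrent.atomic.AtomicLong.
    private static final AtomicLong NEXT_ID = new AtomicLong();

    static DatasetFieldType makeDatasetFieldType() {
        long id = NEXT_ID.incrementAndGet();
        DatasetFieldType retVal = new DatasetFieldType("SampleFieldType-" + id, DatasetFieldType.FieldType.TEXT, false);
        retVal.setId(id);
        return retVal;
    }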
Use of edu.harvard.iq.dataverse.DatasetFieldType in project dataverse by IQSS.
From the class JsonParser, the method parseField:
public DatasetField parseField(JsonObject json) throws JsonParseException {
    if (json == null) {
        return null;
    }
    DatasetField ret = new DatasetField();
    DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", ""));
    if (type == null) {
        throw new JsonParseException("Can't find type '" + json.getString("typeName", "") + "'");
    }
    if (type.isAllowMultiples() != json.getBoolean("multiple")) {
        throw new JsonParseException("incorrect multiple for field " + json.getString("typeName", ""));
    }
    if (type.isCompound() && !json.getString("typeClass").equals("compound")) {
        throw new JsonParseException("incorrect typeClass for field " + json.getString("typeName", "") + ", should be compound.");
    }
    if (!type.isControlledVocabulary() && type.isPrimitive() && !json.getString("typeClass").equals("primitive")) {
        throw new JsonParseException("incorrect typeClass for field: " + json.getString("typeName", "") + ", should be primitive");
    }
    if (type.isControlledVocabulary() && !json.getString("typeClass").equals("controlledVocabulary")) {
        throw new JsonParseException("incorrect typeClass for field " + json.getString("typeName", "") + ", should be controlledVocabulary");
    }
    ret.setDatasetFieldType(type);
    if (type.isCompound()) {
        List<DatasetFieldCompoundValue> vals = parseCompoundValue(type, json);
        for (DatasetFieldCompoundValue dsfcv : vals) {
            dsfcv.setParentDatasetField(ret);
        }
        ret.setDatasetFieldCompoundValues(vals);
    } else if (type.isControlledVocabulary()) {
        List<ControlledVocabularyValue> vals = parseControlledVocabularyValue(type, json);
        for (ControlledVocabularyValue cvv : vals) {
            cvv.setDatasetFieldType(type);
        }
        ret.setControlledVocabularyValues(vals);
    } else {
        // primitive
        List<DatasetFieldValue> values = parsePrimitiveValue(json);
        for (DatasetFieldValue val : values) {
            val.setDatasetField(ret);
        }
        ret.setDatasetFieldValues(values);
    }
    return ret;
}
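A minimal usage sketch, assuming a JsonParser (sut) wired to the mock field service from the test setups above. The typeName, multiple, and typeClass keys mirror the checks in parseField; the "value" key and the javax.json builder calls are assumptions about the payload shape rather than something shown on this page.

    // Hand-build a payload for a controlled vocabulary field and parse it.
    // Requires javax.json.Json and javax.json.JsonObject.
    JsonObject json = Json.createObjectBuilder()
            .add("typeName", "publicationIdType")
            .add("multiple", false)
            .add("typeClass", "controlledVocabulary")
            .add("value", "ark") // assumed key for the field's value
            .build();
    DatasetField field = sut.parseField(json);
    // getDatasetFieldType() is assumed to be the getter paired with the setter used above.
    assertEquals(datasetFieldTypeSvc.findByName("publicationIdType"), field.getDatasetFieldType());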