Use of org.gbif.ipt.model.VocabularyTerm in project ipt by gbif.
The class VocabularyFactoryTest, method testBuild.
@Test
public void testBuild() {
  try {
    Vocabulary tv = getFactory().build(VocabularyFactoryTest.class.getResourceAsStream("/thesauri/type-vocabulary.xml"));
    assertEquals("Dublin Core Type Vocabulary", tv.getTitle());
    assertEquals("http://dublincore.org/documents/dcmi-type-vocabulary/", tv.getUriString());
    assertEquals("The DCMI Type Vocabulary provides a general, cross-domain list of approved terms that may be used as values for the Resource Type element to identify the genre of a resource. The terms documented here are also included in the more comprehensive document \"DCMI Metadata Terms\" at http://dublincore.org/documents/dcmi-terms/.", tv.getDescription());
    assertEquals("http://dublincore.org/documents/dcmi-type-vocabulary/", tv.getLink().toString());
    assertNotNull(tv.getConcepts());
    assertEquals(12, tv.getConcepts().size());

    VocabularyConcept tc = tv.getConcepts().get(0);
    assertEquals("Collection", tc.getIdentifier());
    assertNull(tc.getLink());
    assertEquals("http://purl.org/dc/dcmitype/Collection", tc.getUri());
    assertEquals(tv, tc.getVocabulary());
    assertEquals("Collection", tc.getPreferredTerm("en").getTitle());
    assertEquals("Sammlung", tc.getPreferredTerm("de").getTitle());
    assertNotNull(tc.getTerms());
    assertNotNull(tc.getPreferredTerms());
    assertEquals(2, tc.getPreferredTerms().size());
    assertEquals(0, tc.getAlternativeTerms().size());
    assertEquals(2, tc.getTerms().size());

    // previously there was an assertion that caused IPT to fail when built with Java 5
    // Java 5 - term that comes off iterator 1st is de
    // Java 6 - term that comes off iterator 1st is en
    VocabularyTerm tt = tc.getTerms().iterator().next();
    if (tt.getLang().equals("en")) {
      assertEquals("Collection", tt.getTitle());
    } else {
      assertEquals("de", tt.getLang());
      assertEquals("Sammlung", tt.getTitle());
    }
  } catch (Exception e) {
    e.printStackTrace();
    fail(e.getMessage());
  }
}
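For reference, the accessors exercised in this test can be combined into a small helper that dumps a parsed vocabulary. This is an illustrative sketch only, not part of the IPT test suite; it uses nothing beyond the methods already shown above (getConcepts, getIdentifier, getPreferredTerm, getTitle).

// Illustrative sketch: print each concept's identifier and its preferred English title.
private static void printVocabulary(Vocabulary vocabulary) {
  for (VocabularyConcept concept : vocabulary.getConcepts()) {
    VocabularyTerm preferred = concept.getPreferredTerm("en");
    String title = preferred == null ? concept.getIdentifier() : preferred.getTitle();
    System.out.println(concept.getIdentifier() + " -> " + title);
  }
}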
Use of org.gbif.ipt.model.VocabularyTerm in project ipt by gbif.
The class TranslationActionTest, method setup.
@BeforeEach
public void setup() throws Exception {
  // mock needed managers
  SimpleTextProvider mockTextProvider = mock(SimpleTextProvider.class);
  LocaleProviderFactory localeProviderFactory = new DefaultLocaleProviderFactory();
  AppConfig mockCfg = mock(AppConfig.class);
  ResourceManager mockResourceManager = mock(ResourceManager.class);
  SourceManager mockSourceManager = mock(SourceManager.class);
  VocabulariesManager mockVocabManager = mock(VocabulariesManager.class);
  TranslationAction.Translation translation = new TranslationAction.Translation();
  RegistrationManager mockRegistrationManager = mock(RegistrationManager.class);
  Container container = mock(Container.class);

  // mock getting the list of values back for the BasisOfRecord field/column in the source
  Set<String> values = new LinkedHashSet<>();
  values.add("spe");
  values.add("obs");
  values.add("fos");
  when(mockSourceManager.inspectColumn(any(SourceBase.class), anyInt(), anyInt(), anyInt())).thenReturn(values);

  // mock getI18nVocab - only called in prepare()
  Map<String, String> mockVocab = new HashMap<>();
  mockVocab.put("NomenclaturalChecklist", "Nomenclatural Checklist");
  mockVocab.put("MachineObservation", "Machine Observation");
  when(mockVocabManager.getI18nVocab(anyString(), anyString(), anyBoolean())).thenReturn(mockVocab);

  // initialize new Resource
  Resource resource = new Resource();
  String resourceShortName = "TestResource";
  resource.setShortname(resourceShortName);

  // initialize new ExtensionMapping
  ExtensionMapping mapping = new ExtensionMapping();
  // add source to mapping
  mapping.setSource(new TextFileSource());
  ExtensionFactory factory = ExtensionFactoryTest.getFactory();
  Extension e = factory.build(ExtensionFactoryTest.class.getResourceAsStream("/extensions/dwc_occurrence.xml"));
  // ensure rowType for Extension is set
  if (e.getRowType() == null) {
    e.setRowType(Constants.DWC_ROWTYPE_TAXON);
  }
  // add extension to ExtensionMapping
  mapping.setExtension(e);

  // create map of source values
  TreeMap<String, String> sourceValues = new TreeMap<>();
  sourceValues.put("k1", "spe");
  sourceValues.put("k2", "obs");
  // create map of translation values
  TreeMap<String, String> translatedValues = new TreeMap<>();
  translatedValues.put("k1", "Preserved Specimen");
  translatedValues.put("k2", "observation");
  // create map of translations that get persisted
  Map<String, String> persistedTranslations = new HashMap<>();
  persistedTranslations.put("spe", "Preserved Specimen");
  persistedTranslations.put("obs", "observation");

  // initialize PropertyMapping for BasisOfRecord term
  PropertyMapping field = new PropertyMapping();
  // set ConceptTerm
  field.setTerm(DwcTerm.basisOfRecord);
  // set index
  field.setIndex(1);
  // add translations to field
  field.setTranslation(persistedTranslations);
  // add set of PropertyMapping, including field, to ExtensionMapping
  Set<PropertyMapping> fields = new TreeSet<>();
  fields.add(field);
  mapping.setFields(fields);

  // add ExtensionMapping to resource, with mapping ID 0
  List<ExtensionMapping> mappings = new LinkedList<>();
  mappings.add(mapping);
  resource.setMappings(mappings);

  // mock resourceManager.get - called only in ManagerBaseAction.prepare()
  when(mockResourceManager.get(anyString())).thenReturn(resource);
  // mock a locale provider
  when(container.getInstance(LocaleProviderFactory.class)).thenReturn(localeProviderFactory);

  // create mock Action
  action = new TranslationAction(mockTextProvider, mockCfg, mockRegistrationManager, mockResourceManager, mockSourceManager, mockVocabManager, translation);
  action.setContainer(container);

  // initialize ExtensionProperty representing the BasisOfRecord field on the Occurrence core Extension
  ExtensionProperty property = mapping.getExtension().getProperty(field.getTerm());

  // set a vocabulary for the BasisOfRecord field
  // mock creation of BasisOfRecord vocabulary
  VocabularyConcept concept = new VocabularyConcept();
  concept.setIdentifier("PreservedSpecimen");
  concept.setUri("http://rs.tdwg.org/dwc/dwctype/PreservedSpecimen");
  // preferred titles
  Set<VocabularyTerm> preferredTerms = new HashSet<>();
  VocabularyTerm term = new VocabularyTerm();
  term.setLang("en");
  term.setTitle("Preserved Specimen");
  preferredTerms.add(term);
  concept.setPreferredTerms(preferredTerms);
  // alternative titles
  Set<VocabularyTerm> alternateTerms = new HashSet<>();
  term = new VocabularyTerm();
  term.setLang("en");
  term.setTitle("Conserved Specimen");
  alternateTerms.add(term);
  concept.setAlternativeTerms(alternateTerms);
  Vocabulary vocab = new Vocabulary();
  List<VocabularyConcept> concepts = new ArrayList<>();
  concepts.add(concept);
  vocab.setConcepts(concepts);
  vocab.setUriString("http://rs.gbif.org/vocabulary/dwc/basis_of_record");
  property.setVocabulary(vocab);

  // create the sessionScoped Translation and populate it with the translations
  action.getTrans().setTmap(mapping.getExtension().getRowType(), property, sourceValues, translatedValues);

  // set various properties on the Action
  action.setField(field);
  action.setExtensionMapping(mapping);
  action.setProperty(property);

  // mock servlet request
  HttpServletRequest mockRequest = mock(HttpServletRequest.class);
  // the mapping id is 0 - relates to the resource's List<ExtensionMapping> mappings
  when(mockRequest.getParameter(TranslationAction.REQ_PARAM_MAPPINGID)).thenReturn("0");
  when(mockRequest.getParameter(TranslationAction.REQ_PARAM_ROWTYPE)).thenReturn(Constants.DWC_ROWTYPE_OCCURRENCE);
  when(mockRequest.getParameter(TranslationAction.REQ_PARAM_TERM)).thenReturn(DwcTerm.basisOfRecord.qualifiedName());
  when(mockRequest.getParameter(Constants.REQ_PARAM_RESOURCE)).thenReturn(resourceShortName);
  action.setServletRequest(mockRequest);

  // ensure the resource is set
  action.setResource(resource);
}
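A test built on this fixture would typically call prepare() (the only place the mocked getI18nVocab and resourceManager.get are used, per the comments above) and then assert against the action's state. The sketch below is hypothetical: the getProperty() and getField() getters are assumed to mirror the setters used in the fixture and are not confirmed by the code shown here.

// Hypothetical follow-up test; getProperty()/getField() are assumed counterparts of the setters above.
@Test
public void testPrepare() throws Exception {
  action.prepare();
  // the vocabulary attached to the BasisOfRecord property in setup() should be reachable
  assertNotNull(action.getProperty().getVocabulary());
  assertEquals(DwcTerm.basisOfRecord, action.getField().getTerm());
}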
Use of org.gbif.ipt.model.VocabularyTerm in project ipt by gbif.
The class MockVocabulariesManager, method get.
/**
 * Get one of the installed vocabularies by its unique identifier, populate its list of VocabularyConcept, and
 * return it.
 */
@Override
public Vocabulary get(String uri) {
  Vocabulary v = new Vocabulary();
  Map<String, String> vocabMap = getI18nVocab(uri, Locale.getDefault().getDisplayLanguage(), false);
  for (Map.Entry<String, String> stringStringEntry : vocabMap.entrySet()) {
    VocabularyConcept concept = new VocabularyConcept();
    concept.setIdentifier(stringStringEntry.getKey());
    VocabularyTerm term = new VocabularyTerm();
    term.setTitle(stringStringEntry.getValue());
    term.setLang("en");
    concept.addPreferredTerm(term);
    v.addConcept(concept);
  }
  return v;
}
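A typical call against this mock, assuming it is wired in place of the real VocabulariesManager and using the BasisOfRecord vocabulary URI that appears elsewhere in these examples (the variable name mockVocabulariesManager is illustrative):

// Illustrative sketch: resolve a vocabulary through the mock and read its preferred English titles.
Vocabulary vocab = mockVocabulariesManager.get("http://rs.gbif.org/vocabulary/dwc/basis_of_record");
for (VocabularyConcept concept : vocab.getConcepts()) {
  VocabularyTerm preferred = concept.getPreferredTerm("en");
  System.out.println(concept.getIdentifier() + ": " + (preferred == null ? "" : preferred.getTitle()));
}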
Use of org.gbif.ipt.model.VocabularyTerm in project ipt by gbif.
The class VocabulariesManagerImpl, method getI18nVocab.
@Override
public Map<String, String> getI18nVocab(String identifier, String lang, boolean sortAlphabetically) {
  Map<String, String> map = new LinkedHashMap<>();
  Vocabulary v = get(identifier);
  if (v != null) {
    List<VocabularyConcept> concepts;
    if (sortAlphabetically) {
      concepts = new ArrayList<>(v.getConcepts());
      final String s = lang;
      concepts.sort(Comparator.comparing(o -> (o.getPreferredTerm(s) == null ? o.getIdentifier() : o.getPreferredTerm(s).getTitle())));
    } else {
      concepts = v.getConcepts();
    }
    for (VocabularyConcept c : concepts) {
      VocabularyTerm t = c.getPreferredTerm(lang);
      map.put(c.getIdentifier(), t == null ? c.getIdentifier() : t.getTitle());
    }
  }
  if (map.isEmpty()) {
    LOG.error("Empty i18n map for vocabulary " + identifier + " and language " + lang);
  }
  return map;
}
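Callers use the returned identifier-to-title map directly, for example to populate a localized drop-down. A minimal sketch, assuming a vocabulariesManager instance and the BasisOfRecord vocabulary URI used above:

// Illustrative sketch: fetch the alphabetically sorted English concept titles for a vocabulary.
Map<String, String> i18nVocab = vocabulariesManager.getI18nVocab("http://rs.gbif.org/vocabulary/dwc/basis_of_record", "en", true);
for (Map.Entry<String, String> entry : i18nVocab.entrySet()) {
  System.out.println(entry.getKey() + " = " + entry.getValue());
}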