Use of org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue in project OpenRefine by OpenRefine.
In the class EntityCacheTests, method testGetAll:
@Test
public void testGetAll() throws MediaWikiApiErrorException, IOException, ExecutionException {
    WikibaseDataFetcher fetcher = mock(WikibaseDataFetcher.class);
    PropertyIdValue idA = Datamodel.makeWikidataPropertyIdValue("P42");
    PropertyIdValue idB = Datamodel.makeWikidataPropertyIdValue("P43");
    PropertyIdValue idC = Datamodel.makeWikidataPropertyIdValue("P44");
    PropertyIdValue idD = Datamodel.makeWikidataPropertyIdValue("P45");
    PropertyDocument docA = Datamodel.makePropertyDocument(idA, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_GEO_SHAPE));
    PropertyDocument docB = Datamodel.makePropertyDocument(idB, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_GEO_SHAPE));
    PropertyDocument docC = Datamodel.makePropertyDocument(idC, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_GEO_SHAPE));
    PropertyDocument docD = Datamodel.makePropertyDocument(idD, Datamodel.makeDatatypeIdValue(DatatypeIdValue.DT_GEO_SHAPE));
    EntityCache SUT = new EntityCache(fetcher);
    List<String> entityIdListA = Arrays.asList(idA.getId(), idB.getId());
    List<String> entityIdListB = Arrays.asList(idC.getId(), idD.getId());
    List<String> entityIdListC = Arrays.asList(idB.getId(), idC.getId());
    List<EntityDocument> docListA = Arrays.asList(docA, docB);
    List<EntityDocument> docListB = Arrays.asList(docC, docD);
    List<EntityDocument> docListC = Arrays.asList(docB, docC);
    Map<String, EntityDocument> docMapA = new HashMap<>();
    docMapA.put(idA.getId(), docA);
    docMapA.put(idB.getId(), docB);
    Map<String, EntityDocument> docMapB = new HashMap<>();
    docMapB.put(idC.getId(), docC);
    docMapB.put(idD.getId(), docD);
    Map<String, EntityDocument> docMapC = new HashMap<>();
    docMapC.put(idB.getId(), docB);
    docMapC.put(idC.getId(), docC);
    when(fetcher.getEntityDocuments(entityIdListA)).thenReturn(docMapA);
    when(fetcher.getEntityDocuments(entityIdListB)).thenReturn(docMapB);
    when(fetcher.getEntityDocuments(entityIdListC)).thenReturn(docMapC);
    Assert.assertEquals(SUT.getMultipleDocuments(Arrays.asList(idA, idB)), docListA);
    Assert.assertEquals(SUT.getMultipleDocuments(Arrays.asList(idC, idD)), docListB);
    Assert.assertEquals(SUT.getMultipleDocuments(Arrays.asList(idB, idC)), docListC);
    verify(fetcher, times(0)).getEntityDocuments(entityIdListC);
}
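The final verification only checks that the lookup of idB and idC never triggers a new fetcher call, i.e. that the third request is served entirely from the cache. A complementary check, sketched here under the assumption that the cache issues exactly one fetcher call per cache miss, would verify that the first two batches were each requested only once:

    // hypothetical extra assertions against the same mocked fetcher
    verify(fetcher, times(1)).getEntityDocuments(entityIdListA);
    verify(fetcher, times(1)).getEntityDocuments(entityIdListB);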
Use of org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue in project OpenRefine by OpenRefine.
In the class ScrutinizerTest, method makeSnakGroupList:
public List<SnakGroup> makeSnakGroupList(Snak... snaks) {
    Map<PropertyIdValue, List<Snak>> propertySnakMap = new HashMap<>();
    for (Snak snak : snaks) {
        PropertyIdValue pid = snak.getPropertyId();
        List<Snak> snakList;
        if (propertySnakMap.containsKey(pid)) {
            snakList = propertySnakMap.get(pid);
        } else {
            snakList = new ArrayList<>();
        }
        snakList.add(snak);
        propertySnakMap.put(pid, snakList);
    }
    List<SnakGroup> snakGroupList = new ArrayList<>();
    for (List<Snak> snakList : propertySnakMap.values()) {
        snakGroupList.add(Datamodel.makeSnakGroup(snakList));
    }
    return snakGroupList;
}
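A minimal usage sketch of this helper, with illustrative property ids and string values (not taken from the test class): snaks that share a property end up in the same SnakGroup, while a snak with a different property forms its own group.

    PropertyIdValue pid1 = Datamodel.makeWikidataPropertyIdValue("P580");
    PropertyIdValue pid2 = Datamodel.makeWikidataPropertyIdValue("P582");
    Snak snak1 = Datamodel.makeValueSnak(pid1, Datamodel.makeStringValue("a"));
    Snak snak2 = Datamodel.makeValueSnak(pid1, Datamodel.makeStringValue("b"));
    Snak snak3 = Datamodel.makeValueSnak(pid2, Datamodel.makeStringValue("c"));
    // two groups: one for P580 containing snak1 and snak2, one for P582 containing snak3
    List<SnakGroup> groups = makeSnakGroupList(snak1, snak2, snak3);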
Use of org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue in project OpenRefine by OpenRefine.
In the class ScrutinizerTest, method constraintParameterStatementList:
public List<Statement> constraintParameterStatementList(ItemIdValue itemIdValue, List<SnakGroup> listSnakGroup) {
    PropertyIdValue propertyIdValue = Datamodel.makeWikidataPropertyIdValue("P2302");
    Snak snakValue = Datamodel.makeValueSnak(propertyIdValue, itemIdValue);
    Claim claim = Datamodel.makeClaim(itemIdValue, snakValue, listSnakGroup);
    Reference reference = Datamodel.makeReference(listSnakGroup);
    List<Reference> referenceList = Collections.singletonList(reference);
    Statement statement = Datamodel.makeStatement(claim, referenceList, StatementRank.NORMAL,
            "P2302$77BD7FE4-C051-4776-855C-543F0CE697D0");
    List<Statement> statements = Collections.singletonList(statement);
    return statements;
}
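A usage sketch combining the two helpers above; the constraint item id and qualifier property below are illustrative assumptions, not values used by the test class. It builds a single P2302 (property constraint) statement whose qualifiers come from makeSnakGroupList.

    ItemIdValue constraintType = Datamodel.makeWikidataItemIdValue("Q12345");
    PropertyIdValue qualifierPid = Datamodel.makeWikidataPropertyIdValue("P2306");
    Snak qualifier = Datamodel.makeValueSnak(qualifierPid, Datamodel.makeWikidataPropertyIdValue("P585"));
    List<SnakGroup> qualifiers = makeSnakGroupList(qualifier);
    // one statement with subject constraintType, main snak P2302 -> constraintType,
    // the qualifiers above, and a single reference built from the same snak groups
    List<Statement> constraintStatements = constraintParameterStatementList(constraintType, qualifiers);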
Use of org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue in project OpenRefine by OpenRefine.
In the class ReconEntityRewriterTest, method testRewritePropertyUpdateOnExistingEntity:
@Test
public void testRewritePropertyUpdateOnExistingEntity() throws NewEntityNotCreatedYetException {
    PropertyIdValue subject = TestingData.matchedPropertyID;
    rewriter = new ReconEntityRewriter(library, subject);
    library.setId(7654L, "P1234");
    TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(subject)
            .addStatement(TestingData.generateStatementAddition(subject, TestingData.newPropertyIdB))
            .addStatement(TestingData.generateStatementDeletion(subject, TestingData.existingPropertyId))
            .addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
            .addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
            .addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
    TermedStatementEntityEdit rewritten = rewriter.rewrite(update);
    TermedStatementEntityEdit expected = new TermedStatementEntityEditBuilder(subject)
            .addStatement(TestingData.generateStatementAddition(subject, newlyCreatedProperty))
            .addStatement(TestingData.generateStatementDeletion(subject, TestingData.existingPropertyId))
            .addLabel(Datamodel.makeMonolingualTextValue("label", "de"), true)
            .addDescription(Datamodel.makeMonolingualTextValue("beschreibung", "de"), false)
            .addAlias(Datamodel.makeMonolingualTextValue("darstellung", "de")).build();
    assertEquals(rewritten, expected);
}
Use of org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue in project OpenRefine by OpenRefine.
In the class QualifiersStatementMerger, method merge:
@Override
public Statement merge(Statement existing, Statement added) {
    List<SnakGroup> finalQualifiers = new ArrayList<>(existing.getQualifiers());
    for (SnakGroup addedSnakGroup : added.getQualifiers()) {
        PropertyIdValue pid = addedSnakGroup.getProperty();
        // snak groups whose property is in the configured set of pids are left untouched;
        // otherwise, merge the added group into any existing group for the same property
        if (!pids.contains(pid.getId())) {
            OptionalInt index = IntStream.range(0, finalQualifiers.size())
                    .filter(i -> finalQualifiers.get(i).getProperty().getId().equals(pid.getId()))
                    .findFirst();
            if (index.isEmpty()) {
                finalQualifiers.add(addedSnakGroup);
            } else {
                finalQualifiers.set(index.getAsInt(), mergeSnakGroups(finalQualifiers.get(index.getAsInt()), addedSnakGroup));
            }
        }
    }
    List<Reference> allReferences = new ArrayList<>(existing.getReferences());
    Set<Reference> seenReferences = new HashSet<>(existing.getReferences());
    for (Reference reference : added.getReferences()) {
        if (!seenReferences.contains(reference)) {
            seenReferences.add(reference);
            allReferences.add(reference);
        }
    }
    Statement merged = Datamodel.makeStatement(
            Datamodel.makeClaim(existing.getSubject(), existing.getMainSnak(), finalQualifiers),
            allReferences, existing.getRank(), existing.getStatementId());
    return merged;
}
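The helper mergeSnakGroups is not shown in this excerpt. As a rough idea of what it needs to do, here is a hypothetical sketch (an assumption, not OpenRefine's actual implementation) that concatenates the two groups while skipping snaks already present:

    // hypothetical sketch, assuming simple order-preserving deduplication of snaks
    protected SnakGroup mergeSnakGroups(SnakGroup existing, SnakGroup added) {
        List<Snak> snaks = new ArrayList<>(existing.getSnaks());
        for (Snak snak : added.getSnaks()) {
            if (!snaks.contains(snak)) {
                snaks.add(snak);
            }
        }
        return Datamodel.makeSnakGroup(snaks);
    }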