Use of org.wikidata.wdtk.datamodel.interfaces.StatementUpdate in project OpenRefine by OpenRefine.
The class StatementGroupEditTest, method testAddOrMergeStatementsAllMatching:
@Test
public void testAddOrMergeStatementsAllMatching() {
    // both existing statements match the candidate; the first match is the one merged
    when(merger.match(statement1, statement)).thenReturn(true);
    when(merger.match(statement2, statement)).thenReturn(true);
    when(merger.merge(statement1, statement)).thenReturn(statement1);
    StatementEdit statementEdit = new StatementEdit(statement, merger, StatementEditingMode.ADD_OR_MERGE);
    StatementGroupEdit SUT = new StatementGroupEdit(Collections.singletonList(statementEdit));

    SUT.contributeToStatementUpdate(builder, statementGroup);

    // nothing is added; the merged statement replaces the existing one
    StatementUpdate statementUpdate = builder.build();
    assertEquals(statementUpdate.getAdded(), Collections.emptyList());
    assertEquals(statementUpdate.getReplaced(), Collections.singletonMap(statementId1, statement1));
    assertEquals(statementUpdate.getRemoved(), Collections.emptySet());
}
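The fixtures used above (statement, statement1, statement2, statementId1, merger, builder, statementGroup) are initialized elsewhere in the test class. A minimal sketch of what that setup could look like, assuming Mockito for the merger and WDTK's Datamodel and StatementBuilder helpers; the entity ids and values are illustrative, not taken from the real test:

// Hypothetical fixture setup, not the actual OpenRefine test code
ItemIdValue subject = Datamodel.makeWikidataItemIdValue("Q42");
PropertyIdValue property = Datamodel.makeWikidataPropertyIdValue("P31");
String statementId1 = "Q42$11111111-1111-1111-1111-111111111111";
Statement statement1 = StatementBuilder.forSubjectAndProperty(subject, property)
        .withValue(Datamodel.makeWikidataItemIdValue("Q5"))
        .withId(statementId1)
        .build();
Statement statement2 = StatementBuilder.forSubjectAndProperty(subject, property)
        .withValue(Datamodel.makeWikidataItemIdValue("Q6"))
        .build();
// the candidate statement contributed by the edit
Statement statement = StatementBuilder.forSubjectAndProperty(subject, property)
        .withValue(Datamodel.makeWikidataItemIdValue("Q7"))
        .build();
StatementGroup statementGroup = Datamodel.makeStatementGroup(Arrays.asList(statement1, statement2));
StatementMerger merger = mock(StatementMerger.class);             // OpenRefine's matching/merging strategy
StatementUpdateBuilder builder = StatementUpdateBuilder.create(); // WDTK builder that accumulates the edits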
Use of org.wikidata.wdtk.datamodel.interfaces.StatementUpdate in project OpenRefine by OpenRefine.
The class StatementGroupEditTest, method testAddStatementsMatching:
@Test
public void testAddStatementsMatching() {
    // the candidate statement matches statement1 only
    when(merger.match(statement1, statement)).thenReturn(true);
    when(merger.match(statement2, statement)).thenReturn(false);
    StatementEdit statementEdit = new StatementEdit(statement, merger, StatementEditingMode.ADD);
    StatementGroupEdit SUT = new StatementGroupEdit(Collections.singletonList(statementEdit));

    SUT.contributeToStatementUpdate(builder, statementGroup);

    // in ADD mode an already-matched statement is left untouched, so the update is empty
    StatementUpdate statementUpdate = builder.build();
    assertEquals(statementUpdate.getAdded(), Collections.emptyList());
    assertEquals(statementUpdate.getReplaced(), Collections.emptyMap());
    assertEquals(statementUpdate.getRemoved(), Collections.emptySet());
}
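Contrasted with the previous test, this pins down the difference between ADD and ADD_OR_MERGE. A rough sketch of the branching these tests exercise; findMatching is a hypothetical helper, and the real logic in StatementGroupEdit/StatementEdit may be organized differently:

// Hypothetical outline of the decision the two modes make
Statement matched = findMatching(statementGroup, statement, merger); // null when no existing statement matches
if (matched == null) {
    builder.add(statement);                            // no counterpart yet: plain addition
} else if (mode == StatementEditingMode.ADD_OR_MERGE) {
    builder.replace(merger.merge(matched, statement)); // merged result replaces the old statement
}
// in plain ADD mode a matching statement already exists, so nothing is recorded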
Use of org.wikidata.wdtk.datamodel.interfaces.StatementUpdate in project OpenRefine by OpenRefine.
The class StatementGroupEditTest, method testAddOrMergeStatementsNoMatching:
@Test
public void testAddOrMergeStatementsNoMatching() {
    // no existing statement matches the candidate
    when(merger.match(statement1, statement)).thenReturn(false);
    when(merger.match(statement2, statement)).thenReturn(false);
    StatementEdit statementEdit = new StatementEdit(statement, merger, StatementEditingMode.ADD_OR_MERGE);
    StatementGroupEdit SUT = new StatementGroupEdit(Collections.singletonList(statementEdit));

    SUT.contributeToStatementUpdate(builder, statementGroup);

    // with nothing to merge into, the statement is simply added
    StatementUpdate statementUpdate = builder.build();
    assertEquals(statementUpdate.getAdded(), Collections.singletonList(statement));
    assertEquals(statementUpdate.getReplaced(), Collections.emptyMap());
    assertEquals(statementUpdate.getRemoved(), Collections.emptySet());
}
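All three tests assert on the three buckets a WDTK StatementUpdate exposes: getAdded(), getReplaced() and getRemoved(). For reference, a standalone illustration of how StatementUpdateBuilder populates them; the statement variables are placeholders:

StatementUpdate update = StatementUpdateBuilder.create()
        .add(newStatement)           // surfaces in getAdded()
        .replace(mergedStatement)    // keyed by its statement id in getReplaced()
        .remove(obsoleteStatementId) // collected in getRemoved()
        .build();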
Use of org.wikidata.wdtk.datamodel.interfaces.StatementUpdate in project OpenRefine by OpenRefine.
The class EditBatchProcessorTest, method testMultipleBatchesMediaInfo:
@Test
public void testMultipleBatchesMediaInfo() throws MediaWikiApiErrorException, InterruptedException, IOException {
    // Prepare test data
    MonolingualTextValue label = Datamodel.makeMonolingualTextValue("village in Nepal", "en");
    List<MonolingualTextValue> labels = Collections.singletonList(label);
    TermUpdate labelsUpdate = Datamodel.makeTermUpdate(labels, Collections.emptyList());
    List<String> ids = new ArrayList<>();
    for (int i = 124; i < 190; i++) {
        ids.add("M" + i);
    }
    List<MediaInfoIdValue> mids = ids.stream()
            .map(Datamodel::makeWikimediaCommonsMediaInfoIdValue)
            .collect(Collectors.toList());
    List<TermedStatementEntityEdit> batch = mids.stream()
            .map(mid -> new TermedStatementEntityEditBuilder(mid).addLabel(label, false).build())
            .collect(Collectors.toList());

    int batchSize = 50;
    List<MediaInfoDocument> fullBatch = mids.stream()
            .map(Datamodel::makeMediaInfoDocument)
            .collect(Collectors.toList());
    List<MediaInfoDocument> firstBatch = fullBatch.subList(0, batchSize);
    List<MediaInfoDocument> secondBatch = fullBatch.subList(batchSize, fullBatch.size());
    when(fetcher.getEntityDocuments(toMids(firstBatch))).thenReturn(toMapMediaInfo(firstBatch));
    when(fetcher.getEntityDocuments(toMids(secondBatch))).thenReturn(toMapMediaInfo(secondBatch));

    // Run edits
    EditBatchProcessor processor = new EditBatchProcessor(fetcher, editor, batch, library, summary, maxlag, tags, batchSize);
    assertEquals(processor.progress(), 0);
    for (int i = 124; i < 190; i++) {
        assertEquals(processor.remainingEdits(), 190 - i);
        processor.performEdit();
    }
    assertEquals(processor.remainingEdits(), 0);
    assertEquals(processor.progress(), 100);

    // Check result
    assertEquals(library, new NewEntityLibrary());
    verify(fetcher, times(1)).getEntityDocuments(toMids(firstBatch));
    verify(fetcher, times(1)).getEntityDocuments(toMids(secondBatch));
    for (MediaInfoDocument doc : fullBatch) {
        StatementUpdate statementUpdate = Datamodel.makeStatementUpdate(
                Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
        verify(editor, times(1)).editEntityDocument(
                Datamodel.makeMediaInfoUpdate((MediaInfoIdValue) doc.getEntityId(), doc.getRevisionId(), labelsUpdate, statementUpdate),
                false, summary, tags);
    }
}
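The helpers toMids and toMapMediaInfo are defined elsewhere in EditBatchProcessorTest. Hypothetical versions consistent with how they are used above, assuming the WDTK fetcher contract of mapping id strings to fetched documents:

// Sketches of the test helpers, not the actual OpenRefine code
private List<String> toMids(List<MediaInfoDocument> docs) {
    return docs.stream()
            .map(doc -> doc.getEntityId().getId())
            .collect(Collectors.toList());
}

private Map<String, EntityDocument> toMapMediaInfo(List<MediaInfoDocument> docs) {
    Map<String, EntityDocument> map = new HashMap<>();
    for (MediaInfoDocument doc : docs) {
        map.put(doc.getEntityId().getId(), doc);
    }
    return map;
}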
Use of org.wikidata.wdtk.datamodel.interfaces.StatementUpdate in project OpenRefine by OpenRefine.
The class TermedStatementEntityEdit, method toEntityUpdate:
/**
 * In case the subject id is not new, returns the corresponding update given
 * the current state of the entity.
 *
 * @param entityDocument
 *            the current version of the entity to update
 * @return the update to apply to the entity
 */
public EntityUpdate toEntityUpdate(EntityDocument entityDocument) {
    Validate.isFalse(isNew(), "Cannot create a corresponding entity update for a creation of a new entity.");
    if (id instanceof ItemIdValue) {
        ItemDocument itemDocument = (ItemDocument) entityDocument;

        // Labels: labels marked "if new" are only kept when the entity
        // does not already have a label in that language
        List<MonolingualTextValue> labels = getLabels().stream().collect(Collectors.toList());
        labels.addAll(getLabelsIfNew().stream()
                .filter(label -> !itemDocument.getLabels().containsKey(label.getLanguageCode()))
                .collect(Collectors.toList()));
        TermUpdate labelUpdate = Datamodel.makeTermUpdate(labels, Collections.emptyList());

        // Descriptions: same logic as for labels
        List<MonolingualTextValue> descriptions = getDescriptions().stream().collect(Collectors.toList());
        descriptions.addAll(getDescriptionsIfNew().stream()
                .filter(desc -> !itemDocument.getDescriptions().containsKey(desc.getLanguageCode()))
                .collect(Collectors.toList()));
        TermUpdate descriptionUpdate = Datamodel.makeTermUpdate(descriptions, Collections.emptyList());

        // Aliases, grouped by language
        Set<MonolingualTextValue> aliases = getAliases();
        Map<String, List<MonolingualTextValue>> aliasesMap = aliases.stream()
                .collect(Collectors.groupingBy(MonolingualTextValue::getLanguageCode));
        Map<String, AliasUpdate> aliasMap = aliasesMap.entrySet().stream()
                .collect(Collectors.toMap(Entry::getKey, e -> Datamodel.makeAliasUpdate(e.getValue(), Collections.emptyList())));

        // Statements
        StatementUpdate statementUpdate = toStatementUpdate(itemDocument);

        return Datamodel.makeItemUpdate((ItemIdValue) getEntityId(), entityDocument.getRevisionId(),
                labelUpdate, descriptionUpdate, aliasMap, statementUpdate,
                Collections.emptyList(), Collections.emptyList());
    } else if (id instanceof MediaInfoIdValue) {
        MediaInfoDocument mediaInfoDocument = (MediaInfoDocument) entityDocument;

        // Labels (captions)
        List<MonolingualTextValue> labels = getLabels().stream().collect(Collectors.toList());
        labels.addAll(getLabelsIfNew().stream()
                .filter(label -> !mediaInfoDocument.getLabels().containsKey(label.getLanguageCode()))
                .collect(Collectors.toList()));
        TermUpdate labelUpdate = Datamodel.makeTermUpdate(labels, Collections.emptyList());

        // Statements
        StatementUpdate statementUpdate = toStatementUpdate(mediaInfoDocument);

        return Datamodel.makeMediaInfoUpdate((MediaInfoIdValue) id, entityDocument.getRevisionId(), labelUpdate, statementUpdate);
    } else {
        throw new NotImplementedException("Editing entities of type " + id.getEntityType() + " is not supported yet.");
    }
}
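A sketch of a typical call site, assuming a WDTK fetcher/editor pair and an edit targeting an existing entity; the variable names and summary string are illustrative:

// Hypothetical usage, mirroring the editEntityDocument call verified in the test above
EntityDocument current = fetcher.getEntityDocument(edit.getEntityId().getId());
if (!edit.isNew()) {
    EntityUpdate update = edit.toEntityUpdate(current);
    editor.editEntityDocument(update, false, "adding labels and statements", tags);
}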