Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From class EntityTypeScrutinizerTest, method testAllowed:
    @Test
    public void testAllowed() {
        // Edit that adds a single statement to an existing item
        ItemIdValue idA = TestingData.existingId;
        ValueSnak mainValueSnak = Datamodel.makeValueSnak(propertyIdValue, propertyValue);
        Statement statement = new StatementImpl("P2302", mainValueSnak, idA);
        TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
                .addStatement(add(statement))
                .build();

        // Allowed-entity-types constraint whose qualifier permits the edited entity type
        Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedValue);
        List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
        SnakGroup qualifierSnakGroup = Datamodel.makeSnakGroup(qualifierSnakList);
        List<SnakGroup> constraintQualifiers = Collections.singletonList(qualifierSnakGroup);
        List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, constraintQualifiers);

        // The scrutinizer reads constraints through a mocked fetcher
        ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
        when(fetcher.getConstraintsByType(propertyIdValue, ALLOWED_ENTITY_TYPES_QID))
                .thenReturn(constraintDefinitions);
        setFetcher(fetcher);

        scrutinize(update);
        assertNoWarningRaised();
    }
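All four scrutinizer tests on this page share the same shape: build a one-statement edit with TermedStatementEntityEditBuilder, hand the scrutinizer a mocked ConstraintFetcher, and assert on the warnings. A minimal sketch of the builder step, assuming a WDTK Statement named statement is already in hand (Q4115189 is a placeholder id, and add(...) stands for the test-harness helper that wraps a Statement as an addition):

    ItemIdValue subject = Datamodel.makeWikidataItemIdValue("Q4115189");
    TermedStatementEntityEdit edit = new TermedStatementEntityEditBuilder(subject)
            .addStatement(add(statement)) // add(...) marks the statement as an addition
            .build();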
Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From class InverseConstraintScrutinizerTest, method testTrigger:
    @Test
    public void testTrigger() {
        // Edit that adds a P25 statement to an existing item
        ItemIdValue idA = TestingData.existingId;
        Snak mainSnak = Datamodel.makeValueSnak(propertyId, propertyValue);
        Statement statement = new StatementImpl("P25", mainSnak, idA);
        TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
                .addStatement(add(statement))
                .build();

        // Inverse constraint whose qualifier names the expected inverse property
        Snak qualifierSnak = Datamodel.makeValueSnak(propertyParameter, inversePropertyID);
        List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
        SnakGroup qualifierSnakGroup = Datamodel.makeSnakGroup(qualifierSnakList);
        List<SnakGroup> constraintQualifiers = Collections.singletonList(qualifierSnakGroup);
        List<Statement> constraintDefinitions = constraintParameterStatementList(inverseEntityIdValue, constraintQualifiers);

        ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
        when(fetcher.getConstraintsByType(propertyId, INVERSE_CONSTRAINT_QID))
                .thenReturn(constraintDefinitions);
        setFetcher(fetcher);

        scrutinize(update);
        assertWarningsRaised(InverseConstraintScrutinizer.type);
    }
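The edit here adds only the forward P25 statement; no matching inverse statement accompanies it, so the scrutinizer's warning (InverseConstraintScrutinizer.type) is expected to fire.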
Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From class InverseConstraintScrutinizerTest, method testNoSymmetricClosure:
    @Test
    public void testNoSymmetricClosure() {
        ItemIdValue idA = TestingData.existingId;
        // Some-value snak: the statement's value is unknown, so there is no
        // concrete target item on which an inverse statement could be expected
        Snak mainSnak = Datamodel.makeSomeValueSnak(propertyId);
        Statement statement = new StatementImpl("P25", mainSnak, idA);
        TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
                .addStatement(add(statement))
                .build();

        Snak qualifierSnak = Datamodel.makeValueSnak(propertyParameter, inverseEntityIdValue);
        List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
        SnakGroup qualifierSnakGroup = Datamodel.makeSnakGroup(qualifierSnakList);
        List<SnakGroup> constraintQualifiers = Collections.singletonList(qualifierSnakGroup);
        List<Statement> constraintDefinitions = constraintParameterStatementList(inverseEntityIdValue, constraintQualifiers);

        ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
        when(fetcher.getConstraintsByType(propertyId, INVERSE_CONSTRAINT_QID))
                .thenReturn(constraintDefinitions);
        setFetcher(fetcher);

        scrutinize(update);
        assertNoWarningRaised();
    }
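The contrast with testTrigger lies in the main snak. A value snak has a concrete target that the scrutinizer can check for the inverse statement; a some-value snak represents an unknown value, so there is nothing to close symmetrically and no warning is raised. A sketch of the two constructors (pid and targetItem are hypothetical placeholders):

    // Concrete target: an inverse statement on targetItem can be expected
    Snak concrete = Datamodel.makeValueSnak(pid, targetItem);
    // Unknown value: no target item, hence no symmetric closure to verify
    Snak unknown = Datamodel.makeSomeValueSnak(pid);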
Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From class MultiValueScrutinizerTest, method testExistingItemTrigger:
    @Test
    public void testExistingItemTrigger() {
        ItemIdValue idA = TestingData.existingId;
        ItemIdValue idB = TestingData.matchedId;
        Snak mainSnakValue = Datamodel.makeValueSnak(propertyIdValue, valueSnak);
        Statement statement = new StatementImpl("P1963", mainSnakValue, idA);

        // Two edits are scrutinized together: idA gains two statements, idB one
        TermedStatementEntityEdit updateA = new TermedStatementEntityEditBuilder(idA)
                .addStatement(add(TestingData.generateStatement(idA, idB)))
                .addStatement(add(statement))
                .build();
        TermedStatementEntityEdit updateB = new TermedStatementEntityEditBuilder(idB)
                .addStatement(add(TestingData.generateStatement(idB, idB)))
                .build();

        // Multi-value constraint with no qualifiers
        List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, new ArrayList<>());
        ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
        when(fetcher.getConstraintsByType(propertyIdValue, MULTI_VALUE_CONSTRAINT_QID))
                .thenReturn(constraintDefinitions);
        setFetcher(fetcher);

        scrutinize(updateA, updateB);
        assertWarningsRaised(MultiValueScrutinizer.existing_type);
    }
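Note that scrutinize(...) accepts several edits in one pass here. The expected warning is MultiValueScrutinizer.existing_type, the variant raised for an existing item (TestingData.existingId); the field name suggests the scrutinizer reports differently for edits on existing items than for newly created ones.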
Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From class EditBatchProcessorTest, method testMultipleBatchesMediaInfo:
    @Test
    public void testMultipleBatchesMediaInfo() throws MediaWikiApiErrorException, InterruptedException, IOException {
        // Prepare test data
        MonolingualTextValue label = Datamodel.makeMonolingualTextValue("village in Nepal", "en");
        List<MonolingualTextValue> labels = Collections.singletonList(label);
        TermUpdate labelsUpdate = Datamodel.makeTermUpdate(labels, Collections.emptyList());
        List<String> ids = new ArrayList<>();
        for (int i = 124; i < 190; i++) {
            ids.add("M" + String.valueOf(i));
        }
        List<MediaInfoIdValue> mids = ids.stream()
                .map(e -> Datamodel.makeWikimediaCommonsMediaInfoIdValue(e))
                .collect(Collectors.toList());
        List<TermedStatementEntityEdit> batch = mids.stream()
                .map(mid -> new TermedStatementEntityEditBuilder(mid).addLabel(label, false).build())
                .collect(Collectors.toList());

        // 66 edits with a batch size of 50: documents are prefetched in two chunks
        int batchSize = 50;
        List<MediaInfoDocument> fullBatch = mids.stream()
                .map(mid -> Datamodel.makeMediaInfoDocument(mid))
                .collect(Collectors.toList());
        List<MediaInfoDocument> firstBatch = fullBatch.subList(0, batchSize);
        List<MediaInfoDocument> secondBatch = fullBatch.subList(batchSize, fullBatch.size());
        when(fetcher.getEntityDocuments(toMids(firstBatch))).thenReturn(toMapMediaInfo(firstBatch));
        when(fetcher.getEntityDocuments(toMids(secondBatch))).thenReturn(toMapMediaInfo(secondBatch));

        // Run edits
        EditBatchProcessor processor = new EditBatchProcessor(fetcher, editor, batch, library, summary, maxlag, tags, batchSize);
        assertEquals(0, processor.progress());
        for (int i = 124; i < 190; i++) {
            assertEquals(processor.remainingEdits(), 190 - i);
            processor.performEdit();
        }
        assertEquals(0, processor.remainingEdits());
        assertEquals(100, processor.progress());

        // Check result
        assertEquals(new NewEntityLibrary(), library);
        verify(fetcher, times(1)).getEntityDocuments(toMids(firstBatch));
        verify(fetcher, times(1)).getEntityDocuments(toMids(secondBatch));
        for (MediaInfoDocument doc : fullBatch) {
            StatementUpdate statementUpdate = Datamodel.makeStatementUpdate(
                    Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
            verify(editor, times(1)).editEntityDocument(
                    Datamodel.makeMediaInfoUpdate((MediaInfoIdValue) doc.getEntityId(), doc.getRevisionId(),
                            labelsUpdate, statementUpdate),
                    false, summary, tags);
        }
    }
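A quick sanity check of the numbers: the loop covers ids M124 through M189, i.e. 66 edits, and with batchSize = 50 the processor prefetches the corresponding documents in one chunk of 50 and one of 16, which is exactly what the two verify(fetcher, ...) calls assert. progress() moving from 0 to 100 indicates it reports a percentage.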