Use of org.openrefine.wikidata.updates.TermedStatementEntityEditBuilder in project OpenRefine by OpenRefine.
From the class QuantityScrutinizerTest, method testFractionalAllowed:
@Test
public void testFractionalAllowed() {
    ItemIdValue idA = TestingData.existingId;
    Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, exactValue);
    Statement statement = new StatementImpl("P1083", mainSnak, idA);
    TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
            .addStatement(add(statement))
            .build();
    // No integer-value constraint is declared for the property, so the
    // fractional amount should not raise any warning.
    ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
    when(fetcher.getConstraintsByType(propertyIdValue, INTEGER_VALUED_CONSTRAINT_QID)).thenReturn(new ArrayList<>());
    setFetcher(fetcher);
    scrutinize(update);
    assertNoWarningRaised();
}
From the class QuantityScrutinizerTest, method testUnitReqired:
@Test
public void testUnitReqired() {
    ItemIdValue idA = TestingData.existingId;
    Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, integerValue);
    Statement statement = new StatementImpl("P1083", mainSnak, idA);
    TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
            .addStatement(add(statement))
            .build();
    // The allowed-units constraint declares one allowed unit, but the value
    // above carries no unit, so a "no unit provided" warning is expected.
    Snak qualifierSnak = Datamodel.makeValueSnak(itemParameterPID, allowedUnit);
    List<Snak> qualifierSnakList = Collections.singletonList(qualifierSnak);
    SnakGroup snakGroup1 = Datamodel.makeSnakGroup(qualifierSnakList);
    List<SnakGroup> constraintQualifiers = Collections.singletonList(snakGroup1);
    List<Statement> constraintDefinitions = constraintParameterStatementList(allowedUnitEntity, constraintQualifiers);
    ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
    when(fetcher.getConstraintsByType(propertyIdValue, ALLOWED_UNITS_CONSTRAINT_QID)).thenReturn(constraintDefinitions);
    setFetcher(fetcher);
    scrutinize(update);
    assertWarningsRaised(QuantityScrutinizer.noUnitProvidedType);
}
From the class QuantityScrutinizerTest, method testUnitForbidden:
@Test
public void testUnitForbidden() {
    ItemIdValue idA = TestingData.existingId;
    Snak mainSnak = Datamodel.makeValueSnak(propertyIdValue, goodUnitValue);
    Statement statement = new StatementImpl("P1083", mainSnak, idA);
    TermedStatementEntityEdit update = new TermedStatementEntityEditBuilder(idA)
            .addStatement(add(statement))
            .build();
    // The constraint is defined with no qualifiers, i.e. no unit is allowed,
    // so the unit carried by the value should be reported as invalid.
    List<Statement> constraintDefinitions = constraintParameterStatementList(allowedUnitEntity, new ArrayList<>());
    ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
    when(fetcher.getConstraintsByType(propertyIdValue, ALLOWED_UNITS_CONSTRAINT_QID)).thenReturn(constraintDefinitions);
    setFetcher(fetcher);
    scrutinize(update);
    assertWarningsRaised(QuantityScrutinizer.invalidUnitType);
}
From the class QuickStatementsUpdateScheduler, method splitUpdate:
/**
 * Separates out the statements which refer to new entities from the rest of
 * the update. The resulting updates are stored in {@link #pointerUpdates} and
 * {@link #pointerFreeUpdates}.
 *
 * @param update
 *            the update to split
 * @throws ImpossibleSchedulingException
 *             if two new entity ids are referred to in the same statement
 */
protected void splitUpdate(TermedStatementEntityEdit update) throws ImpossibleSchedulingException {
    TermedStatementEntityEditBuilder remainingUpdateBuilder = new TermedStatementEntityEditBuilder(update.getEntityId())
            .addLabels(update.getLabels(), true)
            .addLabels(update.getLabelsIfNew(), false)
            .addDescriptions(update.getDescriptions(), true)
            .addDescriptions(update.getDescriptionsIfNew(), false)
            .addAliases(update.getAliases());
    Map<EntityIdValue, TermedStatementEntityEditBuilder> referencingUpdates = new HashMap<>();
    for (StatementEdit statement : update.getStatementEdits()) {
        Set<ReconEntityIdValue> pointers = extractor.extractPointers(statement.getStatement());
        if (pointers.isEmpty()) {
            remainingUpdateBuilder.addStatement(statement);
        } else if (pointers.size() == 1 && !update.isNew()) {
            EntityIdValue pointer = pointers.stream().findFirst().get();
            TermedStatementEntityEditBuilder referencingBuilder = referencingUpdates.get(pointer);
            if (referencingBuilder == null) {
                referencingBuilder = new TermedStatementEntityEditBuilder(update.getEntityId());
            }
            referencingBuilder.addStatement(statement);
            referencingUpdates.put(pointer, referencingBuilder);
        } else if (pointers.size() == 1 && pointers.stream().findFirst().get().equals(update.getEntityId())) {
            remainingUpdateBuilder.addStatement(statement);
        } else {
            throw new ImpossibleSchedulingException();
        }
    }
    // Add the update that is not referring to anything to the schedule
    TermedStatementEntityEdit pointerFree = remainingUpdateBuilder.build();
    if (!pointerFree.isNull()) {
        pointerFreeUpdates.add(pointerFree);
    }
    // Add the other updates to the map
    for (Entry<EntityIdValue, TermedStatementEntityEditBuilder> entry : referencingUpdates.entrySet()) {
        TermedStatementEntityEdit pointerUpdate = entry.getValue().build();
        UpdateSequence pointerUpdatesForKey = pointerUpdates.get(entry.getKey());
        if (pointerUpdatesForKey == null) {
            pointerUpdatesForKey = new UpdateSequence();
        }
        pointerUpdatesForKey.add(pointerUpdate);
        pointerUpdates.put(entry.getKey(), pointerUpdatesForKey);
    }
}
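The pattern at the top of splitUpdate, copying every term into a fresh builder and then re-adding only selected statements, is the general way to derive a trimmed edit from an existing one with TermedStatementEntityEditBuilder. A minimal sketch of that pattern follows; the keepStatement predicate is a hypothetical stand-in for the pointer-extraction logic above, while the builder and accessor calls are exactly the ones used in splitUpdate:

// Sketch only: rebuild an edit, keeping its terms but only some of its statements.
// keepStatement is a hypothetical Predicate<StatementEdit>, not part of OpenRefine.
TermedStatementEntityEditBuilder trimmedBuilder = new TermedStatementEntityEditBuilder(update.getEntityId())
        .addLabels(update.getLabels(), true)
        .addLabels(update.getLabelsIfNew(), false)
        .addDescriptions(update.getDescriptions(), true)
        .addDescriptions(update.getDescriptionsIfNew(), false)
        .addAliases(update.getAliases());
for (StatementEdit statementEdit : update.getStatementEdits()) {
    if (keepStatement.test(statementEdit)) {
        trimmedBuilder.addStatement(statementEdit);
    }
}
TermedStatementEntityEdit trimmedEdit = trimmedBuilder.build();
// splitUpdate uses isNull() in the same way, to avoid scheduling empty edits.
if (!trimmedEdit.isNull()) {
    // ... schedule or emit trimmedEdit
}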
From the class QuickStatementsUpdateScheduler, method schedule:
@Override
public List<TermedStatementEntityEdit> schedule(List<TermedStatementEntityEdit> updates) throws ImpossibleSchedulingException {
    pointerUpdates = new HashMap<>();
    pointerFreeUpdates = new UpdateSequence();
    for (TermedStatementEntityEdit update : updates) {
        splitUpdate(update);
    }
    // Reconstruct
    List<TermedStatementEntityEdit> fullSchedule = new ArrayList<>();
    Set<EntityIdValue> mentionedNewEntities = new HashSet<>(pointerUpdates.keySet());
    for (TermedStatementEntityEdit update : pointerFreeUpdates.getUpdates()) {
        fullSchedule.add(update);
        UpdateSequence backPointers = pointerUpdates.get(update.getEntityId());
        if (backPointers != null) {
            fullSchedule.addAll(backPointers.getUpdates());
        }
        mentionedNewEntities.remove(update.getEntityId());
    }
    // Schedule the new entities that are referenced but never edited in their
    // own right, followed by the edits pointing at them (otherwise
    // the entities would remain blank in this batch).
    for (EntityIdValue missingId : mentionedNewEntities) {
        fullSchedule.add(new TermedStatementEntityEditBuilder(missingId).build());
        fullSchedule.addAll(pointerUpdates.get(missingId).getUpdates());
    }
    return fullSchedule;
}
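For orientation, here is a minimal sketch of how this scheduler might be driven. Only QuickStatementsUpdateScheduler, schedule and ImpossibleSchedulingException come from the code above; the no-argument constructor and the two update variables are assumptions made for the example:

// Sketch only: order a batch of edits for QuickStatements.
// updateCreatingNewItem and updateOnExistingItem are hypothetical
// TermedStatementEntityEdit instances built elsewhere.
QuickStatementsUpdateScheduler scheduler = new QuickStatementsUpdateScheduler();
List<TermedStatementEntityEdit> batch = new ArrayList<>();
batch.add(updateCreatingNewItem);
batch.add(updateOnExistingItem);
try {
    List<TermedStatementEntityEdit> ordered = scheduler.schedule(batch);
    // Edits free of references to new entities come first, each immediately
    // followed by the edits whose statements point at that entity.
} catch (ImpossibleSchedulingException e) {
    // Thrown when a single statement refers to two new entity ids.
}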