Use of org.wikidata.wdtk.datamodel.interfaces.ValueSnak in project OpenRefine by OpenRefine.
The class ConflictsWithScrutinizerTest, method testNoStatement.
@Test
public void testNoStatement() {
    ItemIdValue idA = TestingData.existingId;
    ValueSnak valueSnak = Datamodel.makeValueSnak(propertyWithConflictsPid1, conflictingValue1);
    Statement statement = new StatementImpl("P31", valueSnak, idA);
    TermedStatementEntityEdit updateA = new TermedStatementEntityEditBuilder(idA)
            .addStatement(add(statement))
            .build();

    List<Statement> constraintDefinitions = new ArrayList<>();
    ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
    when(fetcher.getConstraintsByType(propertyWithConflictsPid1, CONFLICTS_WITH_CONSTRAINT_QID))
            .thenReturn(constraintDefinitions);
    setFetcher(fetcher);

    scrutinize(updateA);
    assertNoWarningRaised();
}
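For comparison, the same statement can also be assembled through the Wikidata Toolkit Datamodel helpers instead of the StatementImpl convenience constructor. A minimal sketch, assuming the standard Datamodel.makeClaim and Datamodel.makeStatement signatures and reusing idA, valueSnak and the "P31" statement id from the test above:

    // Sketch only: equivalent construction via the Datamodel helpers
    // (requires the Collections and org.wikidata.wdtk.datamodel.interfaces.StatementRank imports)
    Claim claim = Datamodel.makeClaim(idA, valueSnak, Collections.emptyList());
    Statement equivalent = Datamodel.makeStatement(claim, Collections.emptyList(), StatementRank.NORMAL, "P31");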
Use of org.wikidata.wdtk.datamodel.interfaces.ValueSnak in project OpenRefine by OpenRefine.
The class ConflictsWithScrutinizerTest, method testTrigger.
@Test
public void testTrigger() {
    ItemIdValue idA = TestingData.existingId;
    ValueSnak value1 = Datamodel.makeValueSnak(conflictsWithPid, conflictsWithValue);
    ValueSnak value2 = Datamodel.makeValueSnak(propertyWithConflictsPid1, conflictingValue1);
    Statement statement1 = new StatementImpl("P2002", value1, idA);
    Statement statement2 = new StatementImpl("P31", value2, idA);
    TermedStatementEntityEdit updateA = new TermedStatementEntityEditBuilder(idA)
            .addStatement(add(statement1))
            .addStatement(add(statement2))
            .build();

    Snak snak1 = Datamodel.makeValueSnak(propertyParameterPID, conflictingPropertyValue1);
    Snak snak2 = Datamodel.makeValueSnak(itemParameterPID, conflictingItemValue1);
    List<Snak> snakList1 = Collections.singletonList(snak1);
    List<Snak> snakList2 = Collections.singletonList(snak2);
    SnakGroup snakGroup1 = Datamodel.makeSnakGroup(snakList1);
    SnakGroup snakGroup2 = Datamodel.makeSnakGroup(snakList2);
    List<SnakGroup> constraintQualifiers = Arrays.asList(snakGroup1, snakGroup2);
    List<Statement> constraintDefinitions = constraintParameterStatementList(entityIdValue, constraintQualifiers);

    ConstraintFetcher fetcher = mock(ConstraintFetcher.class);
    when(fetcher.getConstraintsByType(conflictsWithPid, CONFLICTS_WITH_CONSTRAINT_QID))
            .thenReturn(constraintDefinitions);
    setFetcher(fetcher);

    scrutinize(updateA);
    assertWarningsRaised(ConflictsWithScrutinizer.type);
}
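If the interaction with the mocked fetcher should also be asserted, a Mockito verification could be appended after the scrutinize call; this is a sketch and not part of the original test:

    // Sketch: confirm the scrutinizer asked for conflicts-with constraints on conflictsWithPid
    verify(fetcher).getConstraintsByType(conflictsWithPid, CONFLICTS_WITH_CONSTRAINT_QID);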
Use of org.wikidata.wdtk.datamodel.interfaces.ValueSnak in project OpenRefine by OpenRefine.
The class SnakOnlyStatementMerger, method match.
/**
 * Matches two snaks using the underlying value matcher.
 * The snaks must have the same property id to match.
 *
 * @param existingSnak
 *            the snak already present on the entity
 * @param addedSnak
 *            the snak to be added
 * @return true when the two snaks are considered equivalent
 */
public boolean match(Snak existingSnak, Snak addedSnak) {
    // Deliberately only comparing the pids and not the siteIRIs to avoid spurious
    // mismatches due to federation
    if (!existingSnak.getPropertyId().getId().equals(addedSnak.getPropertyId().getId())) {
        return false;
    } else if (existingSnak instanceof NoValueSnak && addedSnak instanceof NoValueSnak) {
        return true;
    } else if (existingSnak instanceof SomeValueSnak && addedSnak instanceof SomeValueSnak) {
        return true;
    } else if (existingSnak instanceof ValueSnak && addedSnak instanceof ValueSnak) {
        Value existingValue = ((ValueSnak) existingSnak).getValue();
        Value addedValue = ((ValueSnak) addedSnak).getValue();
        return valueMatcher.match(existingValue, addedValue);
    } else {
        // Mixed snak types (e.g. a ValueSnak against a NoValueSnak) cannot match;
        // guarding here avoids an invalid cast to ValueSnak.
        return false;
    }
}
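A usage sketch of match, assuming merger is a SnakOnlyStatementMerger built with some ValueMatcher (the property and item ids here are illustrative, not taken from the OpenRefine sources):

    // Two value snaks on the same property: the comparison is delegated to the value matcher
    PropertyIdValue p31 = Datamodel.makeWikidataPropertyIdValue("P31");
    ValueSnak existing = Datamodel.makeValueSnak(p31, Datamodel.makeWikidataItemIdValue("Q5"));
    ValueSnak added = Datamodel.makeValueSnak(p31, Datamodel.makeWikidataItemIdValue("Q5"));
    boolean matches = merger.match(existing, added); // true if the value matcher considers the two values equal
    // Snaks on different properties never match, regardless of their values.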
Use of org.wikidata.wdtk.datamodel.interfaces.ValueSnak in project OpenRefine by OpenRefine.
The class ConflictsWithScrutinizer, method scrutinize.
@Override
public void scrutinize(TermedStatementEntityEdit update) {
    Map<PropertyIdValue, Set<Value>> propertyIdValueValueMap = new HashMap<>();
    for (Statement statement : update.getAddedStatements()) {
        PropertyIdValue pid = statement.getClaim().getMainSnak().getPropertyId();
        Value value = null;
        Snak mainSnak = statement.getClaim().getMainSnak();
        if (mainSnak instanceof ValueSnak) {
            value = ((ValueSnak) mainSnak).getValue();
        }
        Set<Value> values;
        if (value != null) {
            if (propertyIdValueValueMap.containsKey(pid)) {
                values = propertyIdValueValueMap.get(pid);
            } else {
                values = new HashSet<>();
            }
            values.add(value);
            propertyIdValueValueMap.put(pid, values);
        }
    }

    for (PropertyIdValue propertyId : propertyIdValueValueMap.keySet()) {
        List<Statement> statementList = _fetcher.getConstraintsByType(propertyId, conflictsWithConstraintQid);
        for (Statement statement : statementList) {
            ConflictsWithConstraint constraint = new ConflictsWithConstraint(statement);
            PropertyIdValue conflictingPid = constraint.conflictingPid;
            List<Value> itemList = constraint.itemList;
            if (propertyIdValueValueMap.containsKey(conflictingPid)
                    && raiseWarning(propertyIdValueValueMap, conflictingPid, itemList)) {
                QAWarning issue = new QAWarning(type, propertyId.getId() + conflictingPid.getId(),
                        QAWarning.Severity.WARNING, 1);
                issue.setProperty("property_entity", propertyId);
                issue.setProperty("added_property_entity", conflictingPid);
                issue.setProperty("example_entity", update.getEntityId());
                addIssue(issue);
            }
        }
    }
}
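The first loop groups the values of added ValueSnak main snaks by property id; a behaviourally equivalent sketch of that step using Map.computeIfAbsent (variable names are illustrative):

    Map<PropertyIdValue, Set<Value>> valuesByPid = new HashMap<>();
    for (Statement statement : update.getAddedStatements()) {
        Snak mainSnak = statement.getClaim().getMainSnak();
        if (mainSnak instanceof ValueSnak) {
            // get-or-create the value set for this property id, then record the value
            valuesByPid.computeIfAbsent(mainSnak.getPropertyId(), k -> new HashSet<>())
                    .add(((ValueSnak) mainSnak).getValue());
        }
    }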
Use of org.wikidata.wdtk.datamodel.interfaces.ValueSnak in project OpenRefine by OpenRefine.
The class InverseConstraintScrutinizer, method scrutinize.
@Override
public void scrutinize(Statement statement, EntityIdValue entityId, boolean added) {
    if (!added) {
        // TODO support for deleted statements
        return;
    }
    Snak mainSnak = statement.getClaim().getMainSnak();
    if (!(mainSnak instanceof ValueSnak)) {
        return;
    }
    Value mainSnakValue = ((ValueSnak) mainSnak).getValue();
    if (mainSnakValue instanceof ItemIdValue) {
        PropertyIdValue pid = mainSnak.getPropertyId();
        PropertyIdValue inversePid = getInverseConstraint(pid);
        if (inversePid != null) {
            EntityIdValue targetEntityId = (EntityIdValue) mainSnakValue;
            Set<EntityIdValue> currentValues = _statements.get(pid).get(entityId);
            if (currentValues == null) {
                currentValues = new HashSet<EntityIdValue>();
                _statements.get(pid).put(entityId, currentValues);
            }
            currentValues.add(targetEntityId);
        }
    }
}
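The null check on currentValues is a get-or-create idiom; assuming _statements maps each property id to a Map<EntityIdValue, Set<EntityIdValue>> (as the code above implies), the same bookkeeping can be sketched with computeIfAbsent:

    // Sketch: get-or-create the per-entity value set, then record the target item
    _statements.get(pid)
            .computeIfAbsent(entityId, k -> new HashSet<>())
            .add(targetEntityId);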