Use of org.wikidata.wdtk.datamodel.interfaces.SnakGroup in project OpenRefine by OpenRefine.
The class SnakOnlyStatementMerger, method merge:
@Override
public Statement merge(Statement existing, Statement added) {
    List<SnakGroup> existingQualifiers = existing.getQualifiers();
    List<SnakGroup> addedQualifiers = added.getQualifiers();
    // flatten snak groups
    List<Snak> existingSnaks = flatten(existingQualifiers);
    List<Snak> addedSnaks = flatten(addedQualifiers);
    List<Snak> mergedSnaks = new ArrayList<>(existingSnaks);
    for (Snak addedSnak : addedSnaks) {
        boolean matchingSnakFound = mergedSnaks.stream().anyMatch(existingSnak -> match(existingSnak, addedSnak));
        if (!matchingSnakFound) {
            mergedSnaks.add(addedSnak);
        }
    }
    List<SnakGroup> groupedQualifiers = WbStatementExpr.groupSnaks(mergedSnaks);
    Claim newClaim = Datamodel.makeClaim(existing.getSubject(), existing.getMainSnak(), groupedQualifiers);
    List<Reference> references = mergeReferences(existing.getReferences(), added.getReferences());
    return Datamodel.makeStatement(newClaim, references, existing.getRank(), existing.getStatementId());
}
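The merge relies on helpers that are not part of this snippet. As a rough illustration only (not OpenRefine's actual code; flatten and WbStatementExpr.groupSnaks may be implemented differently), flattening qualifier SnakGroups into a plain list of Snaks and regrouping them by property could look like the sketch below. Regrouping matters because Wikibase stores qualifiers as one SnakGroup per property.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;

public class SnakGroupingSketch {

    // Flatten a list of SnakGroups into a single flat list of Snaks.
    public static List<Snak> flatten(List<SnakGroup> groups) {
        List<Snak> snaks = new ArrayList<>();
        for (SnakGroup group : groups) {
            snaks.addAll(group.getSnaks());
        }
        return snaks;
    }

    // Regroup a flat list of Snaks into one SnakGroup per property,
    // preserving the order in which properties first appear.
    public static List<SnakGroup> groupSnaks(List<Snak> snaks) {
        Map<PropertyIdValue, List<Snak>> byProperty = new LinkedHashMap<>();
        for (Snak snak : snaks) {
            byProperty.computeIfAbsent(snak.getPropertyId(), k -> new ArrayList<>()).add(snak);
        }
        List<SnakGroup> groups = new ArrayList<>();
        for (List<Snak> sameProperty : byProperty.values()) {
            groups.add(Datamodel.makeSnakGroup(sameProperty));
        }
        return groups;
    }
}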
Use of org.wikidata.wdtk.datamodel.interfaces.SnakGroup in project OpenRefine by OpenRefine.
The class EntityTypeScrutinizer, method scrutinize:
@Override
public void scrutinize(Snak snak, EntityIdValue entityId, boolean added) {
    if (!added) {
        return;
    }
    PropertyIdValue pid = snak.getPropertyId();
    List<Statement> statementList = _fetcher.getConstraintsByType(pid, allowedEntityTypesQid);
    if (!statementList.isEmpty()) {
        List<SnakGroup> constraint = statementList.get(0).getClaim().getQualifiers();
        boolean isUsable = true;
        if (constraint != null) {
            isUsable = findValues(constraint, itemOfPropertyConstraint).contains(Datamodel.makeWikidataItemIdValue(wikibaseItemQid));
        }
        if (!isUsable) {
            QAWarning issue = new QAWarning(type, null, QAWarning.Severity.WARNING, 1);
            issue.setProperty("property_entity", pid);
            issue.setProperty("example_entity", entityId);
            addIssue(issue);
        }
    }
}
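The check above depends on a findValues helper that pulls the allowed values out of the constraint's qualifiers (itemOfPropertyConstraint holds the id of the qualifier property being looked up). A minimal sketch of what such a helper could look like, assuming it simply collects the values of ValueSnaks for the requested property; OpenRefine's real helper may differ:

import java.util.ArrayList;
import java.util.List;

import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
import org.wikidata.wdtk.datamodel.interfaces.Value;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;

public class ConstraintValuesSketch {

    // Collect the values of all ValueSnaks in the given groups whose property
    // matches the requested property id (e.g. the constraint's allowed-value qualifier).
    public static List<Value> findValues(List<SnakGroup> groups, String propertyId) {
        List<Value> values = new ArrayList<>();
        for (SnakGroup group : groups) {
            if (!propertyId.equals(group.getProperty().getId())) {
                continue;
            }
            for (Snak snak : group.getSnaks()) {
                if (snak instanceof ValueSnak) {
                    values.add(((ValueSnak) snak).getValue());
                }
            }
        }
        return values;
    }
}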
Use of org.wikidata.wdtk.datamodel.interfaces.SnakGroup in project OpenRefine by OpenRefine.
The class QuickStatementsExporter, method translateStatement:
protected void translateStatement(String qid, Statement statement, String pid, boolean add, Writer writer) throws IOException {
    Claim claim = statement.getClaim();
    Snak mainSnak = claim.getMainSnak();
    String mainSnakQS = mainSnak.accept(mainSnakPrinter);
    if (!add) {
        // According to https://www.wikidata.org/wiki/Help:QuickStatements#Removing_statements,
        // statement removals are not followed by qualifiers or references.
        writer.write("- ");
        writer.write(qid + mainSnakQS);
        writer.write("\n");
    } else {
        // add statements
        if (statement.getReferences().isEmpty()) {
            writer.write(qid + mainSnakQS);
            for (SnakGroup q : claim.getQualifiers()) {
                translateSnakGroup(q, false, writer);
            }
            writer.write("\n");
        } else {
            // To handle multiple references, we duplicate the statement, once per reference.
            for (Reference r : statement.getReferences()) {
                writer.write(qid + mainSnakQS);
                for (SnakGroup q : claim.getQualifiers()) {
                    translateSnakGroup(q, false, writer);
                }
                for (SnakGroup g : r.getSnakGroups()) {
                    translateSnakGroup(g, true, writer);
                }
                writer.write("\n");
            }
        }
    }
}
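translateSnakGroup itself is not shown in this snippet. A hedged sketch of what it might do, assuming the QuickStatements convention that qualifier snaks keep their P-prefixed property id while reference ("source") snaks use an S prefix; the value printer is passed explicitly here to keep the sketch self-contained, whereas the exporter's real method takes only the group, the flag, and the writer, and its exact output format may differ:

import java.io.IOException;
import java.io.Writer;

import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
import org.wikidata.wdtk.datamodel.interfaces.SnakVisitor;

public class SnakGroupTranslationSketch {

    // 'valuePrinter' stands in for a SnakVisitor<String> that renders a snak's value
    // in QuickStatements syntax (a hypothetical placeholder, not the exporter's field).
    public static void translateSnakGroup(SnakGroup group, boolean reference,
            SnakVisitor<String> valuePrinter, Writer writer) throws IOException {
        String pid = group.getProperty().getId();
        // Assumed convention: reference snaks use an S-prefixed id, qualifiers keep the P prefix.
        String prefix = reference ? "S" + pid.substring(1) : pid;
        for (Snak snak : group.getSnaks()) {
            writer.write("\t" + prefix + "\t" + snak.accept(valuePrinter));
        }
    }
}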
Use of org.wikidata.wdtk.datamodel.interfaces.SnakGroup in project OpenRefine by OpenRefine.
The class UseAsQualifierScrutinizer, method scrutinize:
@Override
public void scrutinize(TermedStatementEntityEdit update) {
    for (Statement statement : update.getAddedStatements()) {
        PropertyIdValue pid = statement.getClaim().getMainSnak().getPropertyId();
        Map<PropertyIdValue, List<Value>> qualifiersMap = new HashMap<>();
        List<SnakGroup> qualifiersList = statement.getClaim().getQualifiers();
        for (SnakGroup qualifier : qualifiersList) {
            PropertyIdValue qualifierPid = qualifier.getProperty();
            List<Value> itemList;
            for (Snak snak : qualifier.getSnaks()) {
                if (!(snak instanceof ValueSnak)) {
                    continue;
                }
                if (qualifiersMap.containsKey(qualifierPid)) {
                    itemList = qualifiersMap.get(qualifierPid);
                } else {
                    itemList = new ArrayList<>();
                }
                itemList.add(((ValueSnak) snak).getValue());
                qualifiersMap.put(qualifierPid, itemList);
            }
        }
        List<Statement> constraintDefinitions = _fetcher.getConstraintsByType(pid, oneOfQualifierValuePropertyQid);
        for (Statement constraintStatement : constraintDefinitions) {
            UseAsQualifierConstraint constraint = new UseAsQualifierConstraint(constraintStatement);
            if (qualifiersMap.containsKey(constraint.allowedQualifierPid)) {
                for (Value value : qualifiersMap.get(constraint.allowedQualifierPid)) {
                    if (!constraint.itemList.contains(value)) {
                        QAWarning issue = new QAWarning(type, pid.getId() + constraint.allowedQualifierPid.getId(), QAWarning.Severity.WARNING, 1);
                        issue.setProperty("statement_entity", pid);
                        issue.setProperty("qualifier_entity", constraint.allowedQualifierPid);
                        issue.setProperty("example_entity", update.getEntityId());
                        addIssue(issue);
                    }
                }
            }
        }
    }
}
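UseAsQualifierConstraint wraps the constraint statement and exposes the qualifier property it restricts (allowedQualifierPid) together with the values that qualifier may take (itemList). A sketch of how such a wrapper could read both from the constraint's qualifiers; the two qualifier property ids are constructor parameters here rather than hard-coded, since the real class presumably fixes them to the corresponding Wikidata constraint qualifier properties:

import java.util.ArrayList;
import java.util.List;

import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.Snak;
import org.wikidata.wdtk.datamodel.interfaces.SnakGroup;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.Value;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;

public class UseAsQualifierConstraintSketch {

    // The qualifier property the constraint restricts.
    public final PropertyIdValue allowedQualifierPid;
    // The values that qualifier is allowed to take.
    public final List<Value> itemList = new ArrayList<>();

    // propertyQualifierId / allowedValueQualifierId identify the two constraint qualifiers;
    // they are parameters to avoid asserting specific Wikidata property ids here.
    public UseAsQualifierConstraintSketch(Statement constraintStatement,
            String propertyQualifierId, String allowedValueQualifierId) {
        PropertyIdValue qualifierPid = null;
        for (SnakGroup group : constraintStatement.getClaim().getQualifiers()) {
            String pid = group.getProperty().getId();
            for (Snak snak : group.getSnaks()) {
                if (!(snak instanceof ValueSnak)) {
                    continue;
                }
                Value value = ((ValueSnak) snak).getValue();
                if (pid.equals(propertyQualifierId) && value instanceof PropertyIdValue) {
                    qualifierPid = (PropertyIdValue) value;
                } else if (pid.equals(allowedValueQualifierId)) {
                    itemList.add(value);
                }
            }
        }
        allowedQualifierPid = qualifierPid;
    }
}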