Usage of de.tudarmstadt.ukp.clarin.webanno.curation.casdiff.internal.AID in the webanno project: class CasDiff, method addConfiguration.
/**
 * Registers the given feature structure with the matching {@link Configuration} in the given
 * {@link ConfigurationSet}, creating a new configuration if no existing one matches. For
 * positions without a link feature, matching is done by full FS equality; for link-feature
 * positions, each slot of the FS is matched individually according to the position's link
 * comparison mode (by target span or by role label).
 *
 * @param aSet
 *            the configuration set at the current position.
 * @param aCasGroupId
 *            the CAS group (user) the FS belongs to.
 * @param aFS
 *            the feature structure to record; SofaFS instances are ignored.
 */
private void addConfiguration(ConfigurationSet aSet, String aCasGroupId, FeatureStructure aFS)
{
    // Sofa FSes do not take part in the diff
    if (aFS instanceof SofaFS) {
        return;
    }

    if (aSet.position.getFeature() == null) {
        // Handle main positions: check if this configuration is already present
        Configuration configuration = null;
        for (Configuration cfg : aSet.configurations) {
            if (equalsFS(cfg.getRepresentative(cases), aFS)) {
                configuration = cfg;
                break;
            }
        }

        // Not found, add new one
        if (configuration == null) {
            configuration = new Configuration(aSet.position);
            aSet.configurations.add(configuration);
        }

        configuration.add(aCasGroupId, aFS);
    }
    else {
        Feature feat = aFS.getType().getFeatureByBaseName(aSet.position.getFeature());

        // If the feature does not exist on this FS, then there can be no link
        // configurations for it.
        if (feat == null) {
            return;
        }

        // For each slot at the given position in the FS-to-be-added, we need to find a
        // corresponding configuration.
        // NOTE(review): assumes the link feature value is a non-null ArrayFS — confirm
        // that upstream guarantees this, otherwise links.size() may NPE.
        ArrayFS links = (ArrayFS) aFS.getFeatureValue(feat);
        for (int i = 0; i < links.size(); i++) {
            FeatureStructure link = links.get(i);
            DiffAdapter adapter = getAdapter(aFS.getType().getName());
            LinkFeatureDecl decl = adapter.getLinkFeature(aSet.position.getFeature());

            // Check if this configuration is already present
            Configuration configuration = null;
            switch (aSet.position.getLinkCompareBehavior()) {
            case LINK_TARGET_AS_LABEL: {
                // Links with a role other than the position's role belong to a different
                // configuration set.
                // NOTE(review): role is assumed non-null here — confirm the role feature
                // is always set on link FSes.
                String role = link.getStringValue(
                        link.getType().getFeatureByBaseName(decl.getRoleFeature()));
                if (!role.equals(aSet.position.getRole())) {
                    continue;
                }

                AnnotationFS target = (AnnotationFS) link.getFeatureValue(
                        link.getType().getFeatureByBaseName(decl.getTargetFeature()));

                // Match existing configurations by comparing the targets of the
                // representative link against this link's target.
                cfgLoop: for (Configuration cfg : aSet.configurations) {
                    FeatureStructure repFS = cfg.getRepresentative(cases);
                    AID repAID = cfg.getRepresentativeAID();
                    FeatureStructure repLink = ((ArrayFS) repFS.getFeatureValue(
                            repFS.getType().getFeatureByBaseName(decl.getName())))
                                    .get(repAID.index);
                    AnnotationFS repTarget = (AnnotationFS) repLink.getFeatureValue(
                            repLink.getType().getFeatureByBaseName(decl.getTargetFeature()));

                    // Compare targets
                    if (equalsAnnotationFS(repTarget, target)) {
                        configuration = cfg;
                        break cfgLoop;
                    }
                }
                break;
            }
            case LINK_ROLE_AS_LABEL: {
                // Links pointing at a target other than the position's target belong to a
                // different configuration set.
                AnnotationFS target = (AnnotationFS) link.getFeatureValue(
                        link.getType().getFeatureByBaseName(decl.getTargetFeature()));
                if (!(target.getBegin() == aSet.position.getLinkTargetBegin()
                        && target.getEnd() == aSet.position.getLinkTargetEnd())) {
                    continue;
                }

                String role = link.getStringValue(
                        link.getType().getFeatureByBaseName(decl.getRoleFeature()));

                // Match existing configurations by comparing the role of the
                // representative link against this link's role.
                cfgLoop: for (Configuration cfg : aSet.configurations) {
                    FeatureStructure repFS = cfg.getRepresentative(cases);
                    AID repAID = cfg.getRepresentativeAID();
                    FeatureStructure repLink = ((ArrayFS) repFS.getFeatureValue(
                            repFS.getType().getFeatureByBaseName(decl.getName())))
                                    .get(repAID.index);
                    String linkRole = repLink.getStringValue(
                            repLink.getType().getFeatureByBaseName(decl.getRoleFeature()));

                    // Compare roles
                    if (role.equals(linkRole)) {
                        configuration = cfg;
                        break cfgLoop;
                    }
                }
                break;
            }
            default:
                // BUGFIX: the original referenced an undefined identifier
                // "linkCompareBehavior"; report the actual mode that was switched on.
                throw new IllegalStateException("Unknown link target comparison mode ["
                        + aSet.position.getLinkCompareBehavior() + "]");
            }

            // Not found, add new one
            if (configuration == null) {
                configuration = new Configuration(aSet.position);
                aSet.configurations.add(configuration);
            }

            configuration.add(aCasGroupId, aFS, aSet.position.getFeature(), i);
        }
    }

    aSet.casGroupIds.add(aCasGroupId);
}
Usage of de.tudarmstadt.ukp.clarin.webanno.curation.casdiff.internal.AID in the webanno project: class CasMerge, method reMergeCas.
/**
 * Using the {@code DiffResult}, determine the annotations to be deleted from the randomly
 * generated merge CAS. The initial merge CAS is stored under the name
 * {@code CurationPanel#CURATION_USER}.
 * <p>
 * Any similar annotations stacked in a {@code CasDiff2.Position} will be assumed a difference
 * <p>
 * Any two annotations with different values will be assumed a difference
 * <p>
 * Any non-stacked empty/null annotations are assumed agreement
 * <p>
 * Any non-stacked annotations with similar values for each of the features are assumed
 * agreement
 * <p>
 * Any two link mode / slotable annotations which agree on the base features are assumed
 * agreement
 *
 * @param aDiff
 *            the {@link DiffResult}
 * @param aTargetDocument
 *            the document whose merge CAS is being rebuilt
 * @param aTargetUsername
 *            the name of the user that owns the target CAS
 * @param aTargetCas
 *            the CAS into which agreed-upon annotations are merged (cleared first)
 * @param aCases
 *            a map of {@code CAS}s for each user and the random merge
 * @throws AnnotationException
 *             if the merge of an individual annotation fails fatally
 * @throws UIMAException
 *             if a CAS operation fails
 */
public void reMergeCas(DiffResult aDiff, SourceDocument aTargetDocument, String aTargetUsername, CAS aTargetCas, Map<String, CAS> aCases) throws AnnotationException, UIMAException {
silenceEvents = true;
// NOTE(review): created/updated are tallied below but never reported in this method —
// presumably intended for logging/metrics; confirm whether they should be emitted.
int updated = 0;
int created = 0;
// Per-annotation merge failures are collected here rather than aborting the whole merge.
// NOTE(review): messages is populated but not returned or logged here — verify consumers.
Set<LogMessage> messages = new LinkedHashSet<>();
// Remove any annotations from the target CAS - keep type system, sentences and tokens
clearAnnotations(aTargetCas);
// If there is nothing to merge, bail out
if (aCases.isEmpty()) {
return;
}
// Set up a cache for resolving type to layer to avoid hammering the DB as we process each
// position
Map<String, AnnotationLayer> type2layer = aDiff.getPositions().stream().map(Position::getType).distinct().map(type -> schemaService.findLayer(aTargetDocument.getProject(), type)).collect(toMap(AnnotationLayer::getName, identity()));
List<String> layerNames = new ArrayList<>(type2layer.keySet());
// Move token layer to front
if (layerNames.contains(Token.class.getName())) {
layerNames.remove(Token.class.getName());
layerNames.add(0, Token.class.getName());
}
// Move sentence layer to front
if (layerNames.contains(Sentence.class.getName())) {
layerNames.remove(Sentence.class.getName());
layerNames.add(0, Sentence.class.getName());
}
// Pass 1: merge span annotations (with tokens and sentences processed before the others,
// since other annotations depend on them)
for (String layerName : layerNames) {
// Only span positions on the base annotation (feature == null); slot positions are
// handled in the second pass below.
List<SpanPosition> positions = aDiff.getPositions().stream().filter(pos -> layerName.equals(pos.getType())).filter(pos -> pos instanceof SpanPosition).map(pos -> (SpanPosition) pos).filter(pos -> pos.getFeature() == null).collect(Collectors.toList());
if (positions.isEmpty()) {
continue;
}
LOG.debug("Processing {} span positions on layer {}", positions.size(), layerName);
// Slots are also excluded for the moment
for (SpanPosition position : positions) {
LOG.trace(" | processing {}", position);
ConfigurationSet cfgs = aDiff.getConfigurationSet(position);
// Only merge positions on which the annotators agree sufficiently
if (!shouldMerge(aDiff, cfgs)) {
continue;
}
try {
Map<String, List<CAS>> casMap = new LinkedHashMap<>();
aCases.forEach((k, v) -> casMap.put(k, asList(v)));
AnnotationFS sourceFS = (AnnotationFS) cfgs.getConfigurations().get(0).getRepresentative(casMap);
CasMergeOperationResult result = mergeSpanAnnotation(aTargetDocument, aTargetUsername, type2layer.get(position.getType()), aTargetCas, sourceFS, false);
LOG.trace(" `-> merged annotation with agreement");
switch(result.getState()) {
case CREATED:
created++;
break;
case UPDATED:
updated++;
break;
}
} catch (AnnotationException e) {
// A failed individual merge is recorded and skipped; the overall merge continues
LOG.trace(" `-> not merged annotation: {}", e.getMessage());
messages.add(LogMessage.error(this, "%s", e.getMessage()));
}
}
}
// Pass 2: after the spans are in place, we can merge the slot features
for (String layerName : layerNames) {
// Now the slot positions (feature != null) that were skipped in pass 1
List<SpanPosition> positions = aDiff.getPositions().stream().filter(pos -> layerName.equals(pos.getType())).filter(pos -> pos instanceof SpanPosition).map(pos -> (SpanPosition) pos).filter(pos -> pos.getFeature() != null).collect(Collectors.toList());
if (positions.isEmpty()) {
continue;
}
LOG.debug("Processing {} slot positions on layer [{}]", positions.size(), layerName);
for (SpanPosition position : positions) {
LOG.trace(" | processing {}", position);
ConfigurationSet cfgs = aDiff.getConfigurationSet(position);
if (!shouldMerge(aDiff, cfgs)) {
continue;
}
try {
Map<String, List<CAS>> casMap = new LinkedHashMap<>();
aCases.forEach((k, v) -> casMap.put(k, asList(v)));
AnnotationFS sourceFS = (AnnotationFS) cfgs.getConfigurations().get(0).getRepresentative(casMap);
// The AID identifies which slot (feature + index) of the representative FS to merge
AID sourceFsAid = cfgs.getConfigurations().get(0).getRepresentativeAID();
mergeSlotFeature(aTargetDocument, aTargetUsername, type2layer.get(position.getType()), aTargetCas, sourceFS, sourceFsAid.feature, sourceFsAid.index);
LOG.trace(" `-> merged annotation with agreement");
} catch (AnnotationException e) {
LOG.trace(" `-> not merged annotation: {}", e.getMessage());
messages.add(LogMessage.error(this, "%s", e.getMessage()));
}
}
}
// Pass 3: finally, we merge the relations (their span endpoints now exist in the target)
for (String layerName : layerNames) {
List<RelationPosition> positions = aDiff.getPositions().stream().filter(pos -> layerName.equals(pos.getType())).filter(pos -> pos instanceof RelationPosition).map(pos -> (RelationPosition) pos).collect(Collectors.toList());
if (positions.isEmpty()) {
continue;
}
LOG.debug("Processing {} relation positions on layer [{}]", positions.size(), layerName);
for (RelationPosition position : positions) {
LOG.trace(" | processing {}", position);
ConfigurationSet cfgs = aDiff.getConfigurationSet(position);
if (!shouldMerge(aDiff, cfgs)) {
continue;
}
try {
Map<String, List<CAS>> casMap = new LinkedHashMap<>();
aCases.forEach((k, v) -> casMap.put(k, asList(v)));
AnnotationFS sourceFS = (AnnotationFS) cfgs.getConfigurations().get(0).getRepresentative(casMap);
CasMergeOperationResult result = mergeRelationAnnotation(aTargetDocument, aTargetUsername, type2layer.get(position.getType()), aTargetCas, sourceFS, false);
LOG.trace(" `-> merged annotation with agreement");
switch(result.getState()) {
case CREATED:
created++;
break;
case UPDATED:
updated++;
break;
}
} catch (AnnotationException e) {
LOG.trace(" `-> not merged annotation: {}", e.getMessage());
messages.add(LogMessage.error(this, "%s", e.getMessage()));
}
}
}
// Notify listeners once for the whole bulk operation rather than per annotation
if (eventPublisher != null) {
eventPublisher.publishEvent(new BulkAnnotationEvent(this, aTargetDocument, aTargetUsername, null));
}
}
Aggregations