Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling.
From class LegacyGraphConverter, method convertToAnnotationGraph.
/**
 * Converts a Salt {@link SDocument} to the legacy {@link AnnotationGraph} model.
 *
 * <p>Reads the matched-IDs document feature (attached under the ANNIS namespace),
 * resolves each matched Salt ID to its internal relANNIS node ID, and delegates
 * to {@link #convertToAnnotationGraph(SDocumentGraph, List)}.
 *
 * @param document the Salt document to convert; must have a document graph
 * @return the legacy annotation graph for this document
 */
public static AnnotationGraph convertToAnnotationGraph(SDocument document) {
  SDocumentGraph docGraph = document.getDocumentGraph();
  // the matched node IDs are stored as a serialized Match in a document-graph feature
  SFeature featMatchedIDs = docGraph.getFeature(ANNIS_NS, FEAT_MATCHEDIDS);
  Match match = new Match();
  if (featMatchedIDs != null && featMatchedIDs.getValue_STEXT() != null) {
    match = Match.parseFromString(featMatchedIDs.getValue_STEXT(), ',');
  }
  // resolve the matched Salt IDs to internal relANNIS node IDs
  List<Long> matchedNodeIDs = new ArrayList<>();
  for (URI u : match.getSaltIDs()) {
    SNode node = docGraph.getNode(u.toASCIIString());
    if (node == null) {
      // that's weird: keep a placeholder so the match positions stay aligned
      log.warn("Could not get matched node from id {}", u.toASCIIString());
      // uppercase 'L' suffix: '-1l' is easily misread as '-11'
      matchedNodeIDs.add(-1L);
    } else {
      RelannisNodeFeature relANNISFeat = (RelannisNodeFeature) node
          .getFeature(SaltUtil.createQName(ANNIS_NS, FEAT_RELANNIS_NODE)).getValue();
      matchedNodeIDs.add(relANNISFeat.getInternalID());
    }
  }
  return convertToAnnotationGraph(docGraph, matchedNodeIDs);
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling.
From class LegacyGraphConverter, method convertToAnnotationGraph (graph/ID-list overload).
/**
 * Converts a Salt document graph into the legacy {@link AnnotationGraph} model.
 *
 * <p>Each Salt node carrying a relANNIS feature becomes an {@link AnnisNode};
 * tokens additionally get their covered text and token index. Relations with a
 * relANNIS edge feature are copied, and artificial dominance components get an
 * extra relation with an empty name.
 *
 * @param docGraph the Salt document graph to convert
 * @param matchedNodeIDs internal relANNIS IDs of the matched nodes; their list
 *        position (1-based) becomes the node's match number in the query
 * @return the legacy annotation graph
 */
public static AnnotationGraph convertToAnnotationGraph(SDocumentGraph docGraph, List<Long> matchedNodeIDs) {
  Set<Long> matchSet = new HashSet<>(matchedNodeIDs);
  AnnotationGraph annoGraph = new AnnotationGraph();

  List<String> pathList =
      CommonHelper.getCorpusPath(docGraph.getDocument().getGraph(), docGraph.getDocument());
  annoGraph.setPath(pathList.toArray(new String[pathList.size()]));
  annoGraph.setDocumentName(docGraph.getDocument().getName());

  // map from the Salt node to its converted counterpart, needed for relation conversion below
  Map<SNode, AnnisNode> allNodes = new HashMap<>();

  for (SNode sNode : docGraph.getNodes()) {
    SFeature featNodeRaw = sNode.getFeature(SaltUtil.createQName(ANNIS_NS, FEAT_RELANNIS_NODE));
    if (featNodeRaw != null) {
      RelannisNodeFeature featNode = (RelannisNodeFeature) featNodeRaw.getValue();
      long internalID = featNode.getInternalID();
      AnnisNode aNode = new AnnisNode(internalID);

      for (SAnnotation sAnno : sNode.getAnnotations()) {
        aNode.addNodeAnnotation(
            new Annotation(sAnno.getNamespace(), sAnno.getName(), sAnno.getValue_STEXT()));
      }
      aNode.setName(sNode.getName());
      // use the first layer (if any) as the legacy namespace
      Set<SLayer> layers = sNode.getLayers();
      if (!layers.isEmpty()) {
        aNode.setNamespace(layers.iterator().next().getName());
      }

      if (sNode instanceof SToken) {
        List<DataSourceSequence> seqList =
            docGraph.getOverlappedDataSourceSequence(sNode, SALT_TYPE.STEXT_OVERLAPPING_RELATION);
        if (seqList != null) {
          DataSourceSequence seq = seqList.get(0);
          Preconditions.checkNotNull(seq, "DataSourceSequence is null for token %s", sNode.getId());
          SSequentialDS seqDS = seq.getDataSource();
          Preconditions.checkNotNull(seqDS, "SSequentalDS is null for token %s", sNode.getId());
          Preconditions.checkNotNull(seqDS.getData(),
              "SSequentalDS data is null for token %s", sNode.getId());
          String seqDSData = (String) seqDS.getData();
          Preconditions.checkNotNull(seqDSData,
              "casted SSequentalDS data is null for token %s", sNode.getId());
          Preconditions.checkNotNull(seq.getStart(),
              "SSequentalDS start is null for token %s", sNode.getId());
          Preconditions.checkNotNull(seq.getEnd(),
              "SSequentalDS end is null for supposed token %s", sNode.getId());
          int start = seq.getStart().intValue();
          int end = seq.getEnd().intValue();
          // message fixed: the old template had two placeholders for three arguments,
          // so the token id silently ate the "start" slot
          Preconditions.checkState(start >= 0 && start <= end && end <= seqDSData.length(),
              "Illegal start or end of textual DS for token %s (start: %s, end: %s)",
              sNode.getId(), start, end);
          String spannedText = seqDSData.substring(start, end);
          Preconditions.checkNotNull(spannedText,
              "spanned text is null for supposed token %s (start: %s, end: %s)",
              sNode.getId(), start, end);
          aNode.setSpannedText(spannedText);
          aNode.setToken(true);
          aNode.setTokenIndex(featNode.getTokenIndex());
        }
      } else {
        aNode.setToken(false);
        aNode.setTokenIndex(null);
      }

      // reuse featNode instead of re-fetching the same feature a second time
      aNode.setCorpus(featNode.getCorpusRef());
      aNode.setTextId(featNode.getTextRef());
      aNode.setLeft(featNode.getLeft());
      aNode.setLeftToken(featNode.getLeftToken());
      aNode.setRight(featNode.getRight());
      aNode.setRightToken(featNode.getRightToken());

      if (matchSet.contains(aNode.getId())) {
        // match numbers are 1-based list positions of the matched IDs
        aNode.setMatchedNodeInQuery((long) matchedNodeIDs.indexOf(aNode.getId()) + 1);
        annoGraph.getMatchedNodeIds().add(aNode.getId());
      } else {
        aNode.setMatchedNodeInQuery(null);
      }

      annoGraph.addNode(aNode);
      allNodes.put(sNode, aNode);
    }
  }

  for (SRelation rel : docGraph.getRelations()) {
    RelannisEdgeFeature featRelation = RelannisEdgeFeature.extract(rel);
    if (featRelation != null) {
      addRelation(rel, featRelation.getPre(), featRelation.getComponentID(), allNodes, annoGraph);
    }
  }

  // add relations with empty relation name for every artificial dominance relation
  List<SDominanceRelation> dominanceRelations =
      new LinkedList<>(docGraph.getDominanceRelations());
  for (SDominanceRelation rel : dominanceRelations) {
    RelannisEdgeFeature featEdge = RelannisEdgeFeature.extract(rel);
    if (featEdge != null && featEdge.getArtificialDominanceComponent() != null
        && featEdge.getArtificialDominancePre() != null) {
      addRelation(SDominanceRelation.class, null, rel.getAnnotations(), rel.getSource(),
          rel.getTarget(), rel.getLayers(), featEdge.getArtificialDominancePre(),
          featEdge.getArtificialDominanceComponent(), allNodes, annoGraph);
    }
  }

  return annoGraph;
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling.
From class TimelineReconstructorTest, method testBematacDialog.
/**
 * Tests a sample dialog reconstruction.
 * The dialog is this one: https://korpling.org/annis3/?id=44b60a56-31da-4469-b438-62fdb67f28f1
 *
 * The Salt which was generated by ANNIS is loaded and the virtual tokenization is removed.
 * It is checked if
 * <ul>
 * <li>the newly created tokenization is correct</li>
 * <li>spans cover the correct token</li>
 * </ul>
 */
@Test
public void testBematacDialog() {
  SDocumentGraph docGraph = SaltUtil.loadDocumentGraph(
      URI.createURI(getClass().getResource("SampleDialog.salt").toString()));

  // map the utterance annotations of both speakers to their "dipl" order names
  Map<String, String> anno2order = new HashMap<>();
  anno2order.put("default_ns::instructee_utt", "instructee_dipl");
  anno2order.put("default_ns::instructor_utt", "instructor_dipl");

  TimelineReconstructor.removeVirtualTokenization(docGraph, anno2order);

  // expected texts: instructor_dipl, instructor_norm, instructee_dipl,
  // instructee_norm, instructee_extra, break
  List<STextualDS> texts = docGraph.getTextualDSs();
  assertEquals(6, texts.size());

  STextualDS instructorDipl = findTextualDSByName("instructor_dipl", texts);
  assertNotNull(instructorDipl);
  assertEquals("in Richtung des Toasters gehst ja gehst", instructorDipl.getText());

  // collect the token of the instructor_dipl text and compare them one by one
  DataSourceSequence<Integer> diplSeq = new DataSourceSequence<>();
  diplSeq.setDataSource(instructorDipl);
  diplSeq.setStart(instructorDipl.getStart());
  diplSeq.setEnd(instructorDipl.getEnd());
  List<SToken> diplToken = docGraph.getTokensBySequence(diplSeq);

  String[] expectedDiplToken = {"in", "Richtung", "des", "Toasters", "gehst", "ja", "gehst"};
  assertEquals(expectedDiplToken.length, diplToken.size());
  for (int i = 0; i < expectedDiplToken.length; i++) {
    assertEquals(expectedDiplToken[i], docGraph.getText(diplToken.get(i)));
  }

  // check that the other real spans are now connected with the token
  List<SNode> uttNode = docGraph.getNodesByName("sSpan1294");
  assertNotNull(uttNode);
  assertEquals(1, uttNode.size());

  SAnnotation uttAnno = uttNode.get(0).getAnnotation("default_ns::instructor_utt");
  assertNotNull(uttAnno);
  assertEquals("utt", uttAnno.getValue_STEXT());

  // every outgoing relation must be a spanning relation onto the instructor_dipl text
  List<SRelation> uttOutRelations = uttNode.get(0).getOutRelations();
  assertNotNull(uttOutRelations);
  assertEquals(5, uttOutRelations.size());
  for (SRelation rel : uttOutRelations) {
    assertTrue(rel instanceof SSpanningRelation);
    assertEquals(instructorDipl,
        CommonHelper.getTextualDSForNode((SNode) rel.getTarget(), docGraph));
  }

  // verify the reconstructed text of the remaining data sources
  STextualDS instructorNorm = findTextualDSByName("instructor_norm", texts);
  assertNotNull(instructorNorm);
  assertEquals("in Richtung des Toasters gehst ja gehst", instructorNorm.getText());

  STextualDS instructeeDipl = findTextualDSByName("instructee_dipl", texts);
  assertNotNull(instructeeDipl);
  assertEquals("mhm ich geh in Richtung des Toasters okay", instructeeDipl.getText());

  STextualDS instructeeNorm = findTextualDSByName("instructee_norm", texts);
  assertNotNull(instructeeNorm);
  assertEquals("ich gehe in Richtung des Toasters okay", instructeeNorm.getText());

  STextualDS instructeeExtra = findTextualDSByName("instructee_extra", texts);
  assertNotNull(instructeeExtra);
  assertEquals("zeichnet", instructeeExtra.getText());

  STextualDS breakText = findTextualDSByName("break", texts);
  assertNotNull(breakText);
  assertEquals("0,7 0,5", breakText.getText());
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling.
From class EventExtractor, method splitRowsOnGaps.
/**
 * Splits events of a row if they contain a gap. Gaps are found using the
 * token index (provided as ANNIS specific {@link SFeature}). Inserted events
 * have a special style to mark them as gaps.
 *
 * @param row the row whose events are split in place
 * @param graph the document graph used to resolve covered node IDs
 * @param startTokenIndex token index of the first token in the match
 * @param endTokenIndex token index of the last token in the match
 */
private static void splitRowsOnGaps(Row row, final SDocumentGraph graph, long startTokenIndex, long endTokenIndex) {
  ListIterator<GridEvent> itEvents = row.getEvents().listIterator();
  while (itEvents.hasNext()) {
    GridEvent event = itEvents.next();
    int lastTokenIndex = -1;

    // sort the coveredIDs by their token index; unresolvable (null) nodes sort first
    LinkedList<String> sortedCoveredToken = new LinkedList<>(event.getCoveredIDs());
    Collections.sort(sortedCoveredToken, new Comparator<String>() {
      @Override
      public int compare(String o1, String o2) {
        SNode node1 = graph.getNode(o1);
        SNode node2 = graph.getNode(o2);
        if (node1 == node2) {
          return 0;
        }
        if (node1 == null) {
          return -1;
        }
        if (node2 == null) {
          return +1;
        }
        RelannisNodeFeature feat1 =
            (RelannisNodeFeature) node1.getFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue();
        RelannisNodeFeature feat2 =
            (RelannisNodeFeature) node2.getFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue();
        // Long.compare avoids the explicit boxing of the compareTo idiom
        return Long.compare(feat1.getTokenIndex(), feat2.getTokenIndex());
      }
    });

    // first calculate all gaps
    List<GridEvent> gaps = new LinkedList<>();
    for (String id : sortedCoveredToken) {
      SNode node = graph.getNode(id);
      if (node == null) {
        // the comparator above already tolerates unresolvable IDs; skip them
        // here as well instead of failing on the feature lookup
        continue;
      }
      RelannisNodeFeature feat =
          (RelannisNodeFeature) node.getFeature(ANNIS_NS, FEAT_RELANNIS_NODE).getValue();

      long tokenIndexRaw = feat.getTokenIndex();
      tokenIndexRaw = clip(tokenIndexRaw, startTokenIndex, endTokenIndex);
      int tokenIndex = (int) (tokenIndexRaw - startTokenIndex);

      // sanity check: the token must lie inside the event's own span
      if (tokenIndex >= event.getLeft() && tokenIndex <= event.getRight()) {
        int diff = tokenIndex - lastTokenIndex;
        if (lastTokenIndex >= 0 && diff > 1) {
          // we detected a gap
          GridEvent gap = new GridEvent(event.getId() + "_gap_" + gaps.size(),
              lastTokenIndex + 1, tokenIndex - 1, "");
          gap.setGap(true);
          gaps.add(gap);
        }
        lastTokenIndex = tokenIndex;
      } else {
        // reset gap search when discovered there were token we use for
        // highlighting but do not actually cover
        lastTokenIndex = -1;
      }
    } // end for each covered token id

    ListIterator<GridEvent> itGaps = gaps.listIterator();
    // remember the old right value
    int oldRight = event.getRight();
    int gapNr = 0;
    while (itGaps.hasNext()) {
      GridEvent gap = itGaps.next();
      if (gapNr == 0) {
        // shorten original event
        event.setRight(gap.getLeft() - 1);
      }

      // insert the real gap
      itEvents.add(gap);

      int rightBorder = oldRight;
      if (itGaps.hasNext()) {
        // don't use the old event right border since the gap should only go until
        // the next event
        GridEvent nextGap = itGaps.next();
        itGaps.previous();
        rightBorder = nextGap.getLeft() - 1;
      }

      // insert a new event node that covers the rest of the event
      GridEvent after = new GridEvent(event);
      after.setId(event.getId() + "_after_" + gapNr);
      after.setLeft(gap.getRight() + 1);
      after.setRight(rightBorder);
      itEvents.add(after);
      gapNr++;
    }
  }
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling.
From class GridComponent, method computeTokenRow.
/**
 * Builds the token row of the grid: one {@link GridEvent} per token whose
 * text is referenced by any included annotation row (or by the enforced text).
 *
 * @param tokens the token to consider, in document order
 * @param graph the document graph the token belong to
 * @param rowsByAnnotation the annotation rows whose text IDs define which texts are valid
 * @param startIndex token index of the first token of the match
 * @param tokenOffsetForText out-parameter: set from the first accepted token if still negative
 * @return the row containing the token events
 */
private Row computeTokenRow(List<SNode> tokens, SDocumentGraph graph, LinkedHashMap<String, ArrayList<Row>> rowsByAnnotation, long startIndex, AtomicInteger tokenOffsetForText) {
  // We will only add token of texts that are mentioned by any included annotation.
  Set<String> allowedTextIDs = new HashSet<>();
  if (enforcedText != null) {
    allowedTextIDs.add(enforcedText.getId());
  } else {
    for (ArrayList<Row> rowsForAnnotation : rowsByAnnotation.values()) {
      for (Row r : rowsForAnnotation) {
        allowedTextIDs.addAll(r.getTextIDs());
      }
    }
    // Fallback: if no valid text was found, show all token of the first text
    // (covers the single-text and the two-text case).
    List<STextualDS> allTexts = graph.getTextualDSs();
    if (allowedTextIDs.isEmpty() && allTexts != null
        && (allTexts.size() == 1 || allTexts.size() == 2)) {
      allowedTextIDs.add(allTexts.get(0).getId());
    }
  }

  Row tokenRow = new Row();
  for (SNode tok : tokens) {
    // resolve the STextualDS of this token and skip token of other texts
    STextualDS textOfToken = CommonHelper.getTextualDSForNode(tok, graph);
    if (textOfToken == null || !allowedTextIDs.contains(textOfToken.getId())) {
      continue;
    }

    RelannisNodeFeature feat = (RelannisNodeFeature) tok
        .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_RELANNIS_NODE).getValue();
    long idxLeft = feat.getLeftToken() - startIndex;
    long idxRight = feat.getRightToken() - startIndex;

    if (tokenOffsetForText.get() < 0) {
      // set the token offset by assuming the first idx must be zero
      tokenOffsetForText.set(Math.abs((int) idxLeft));
    }

    String text = extractTextForToken(tok, segmentationName);
    GridEvent event = new GridEvent(tok.getId(), (int) idxLeft, (int) idxRight, text);
    event.setTextID(textOfToken.getId());

    // mark the event if the token belongs to a matched node
    Long match = isCoveredTokenMarked()
        ? markCoveredTokens(input.getMarkedAndCovered(), tok)
        : tokenMatch(tok);
    event.setMatch(match);

    tokenRow.addEvent(event);
  }
  return tokenRow;
}
Aggregations