Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling:
class GridComponent, method createAnnotationGrid.
/**
 * Builds the annotation grid for the current visualizer input and attaches it to the layout.
 *
 * <p>The grid shows one row per annotation (span and/or token annotations, depending on the
 * component configuration) plus a row with the token text itself. User-configured "grid
 * templates" from the resolver mappings are applied to rewrite annotation values before
 * display, and a warning label is toggled when the token row turns out to be empty.
 */
private void createAnnotationGrid() {
    String resultID = input.getId();
    grid = new AnnotationGrid(mediaController, pdfController, resultID);
    grid.addStyleName(getMainStyle());
    grid.addStyleName(Helper.CORPUS_FONT_FORCE);
    grid.setEscapeHTML(Boolean.parseBoolean(
        input.getMappings().getProperty(MAPPING_ESCAPE_HTML, "true")));

    // decide which node types contribute annotation rows
    LinkedList<Class<? extends SNode>> types = new LinkedList<>();
    if (isShowingSpanAnnotations()) {
        types.add(SSpan.class);
    }
    if (isShowingTokenAnnotations()) {
        types.add(SToken.class);
    }
    grid.setAnnosWithNamespace(EventExtractor.computeDisplayedNamespace(input, types));
    layout.addComponent(grid);

    SDocumentGraph graph = input.getDocument().getDocumentGraph();
    List<SNode> tokens = CommonHelper.getSortedSegmentationNodes(segmentationName, graph);
    Preconditions.checkArgument(!tokens.isEmpty(), "Token list must be non-empty");

    // the covered token range is stored as a RelANNIS feature on the first/last node
    RelannisNodeFeature featTokStart = (RelannisNodeFeature) tokens.get(0)
        .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_RELANNIS_NODE).getValue();
    long startIndex = featTokStart.getTokenIndex();
    RelannisNodeFeature featTokEnd = (RelannisNodeFeature) tokens.get(tokens.size() - 1)
        .getFeature(AnnisConstants.ANNIS_NS, AnnisConstants.FEAT_RELANNIS_NODE).getValue();
    long endIndex = featTokEnd.getTokenIndex();

    LinkedHashMap<String, ArrayList<Row>> rowsByAnnotation =
        computeAnnotationRows(startIndex, endIndex);

    // Apply the configured grid templates, e.g.
    //   entity="person"==>:)   or   infstat==><b>%%value%%</b>
    // Multiple templates are separated by "||".
    String gridTemplates = input.getMappings().getProperty(MAPPING_GRID_TEMPLATES, "");
    if (!gridTemplates.isEmpty()) {
        for (String rawTemplate : gridTemplates.split("\\|\\|")) {
            String[] unitSplit = rawTemplate.split("==>");
            if (unitSplit.length != 2) {
                // malformed template entry: skip it instead of failing with an
                // ArrayIndexOutOfBoundsException
                continue;
            }
            applyGridTemplate(rowsByAnnotation, unitSplit[0], unitSplit[1]);
        }
    }

    // add the token text itself as a row
    AtomicInteger tokenOffsetForText = new AtomicInteger(-1);
    Row tokenRow = computeTokenRow(tokens, graph, rowsByAnnotation, startIndex, tokenOffsetForText);
    if (isHidingToken()) {
        tokenRow.setStyle("invisible_token");
    }
    if (isTokenFirst()) {
        // copy original list but add token row at the beginning
        LinkedHashMap<String, ArrayList<Row>> newList = new LinkedHashMap<>();
        newList.put("tok", Lists.newArrayList(tokenRow));
        newList.putAll(rowsByAnnotation);
        rowsByAnnotation = newList;
    } else {
        // just add the token row to the end of the list
        rowsByAnnotation.put("tok", Lists.newArrayList(tokenRow));
    }
    EventExtractor.removeEmptySpace(rowsByAnnotation, tokenRow);

    // check if the token row only contains empty values
    boolean tokenRowIsEmpty = true;
    for (GridEvent tokenEvent : tokenRow.getEvents()) {
        if (tokenEvent.getValue() != null && !tokenEvent.getValue().trim().isEmpty()) {
            tokenRowIsEmpty = false;
            break;
        }
    }
    if (!isHidingToken() && canShowEmptyTokenWarning()) {
        lblEmptyToken.setVisible(tokenRowIsEmpty);
    }
    grid.setRowsByAnnotation(rowsByAnnotation);
    grid.setTokenIndexOffset(tokenOffsetForText.get());
}

/**
 * Applies a single grid template to all matching annotation rows.
 *
 * @param rowsByAnnotation rows keyed by qualified annotation name ("namespace::name")
 * @param condition either a bare annotation name (e.g. {@code infstat}) or a
 *                  name/value restriction (e.g. {@code entity="person"})
 * @param template the replacement text; for an unconditional template the
 *                 {@code %%value%%} placeholder is substituted with the original value
 */
private static void applyGridTemplate(LinkedHashMap<String, ArrayList<Row>> rowsByAnnotation,
    String condition, String template) {
    int eqIdx = condition.indexOf('=');
    String targetRow = eqIdx < 0 ? condition : condition.substring(0, eqIdx);
    String targetValue = eqIdx < 0 ? null : condition.substring(eqIdx + 1).replace("\"", "");

    for (Map.Entry<String, ArrayList<Row>> entry : rowsByAnnotation.entrySet()) {
        // row keys have the form "namespace::name"; fall back to the whole key
        // when there is no namespace separator
        String[] qname = entry.getKey().split("::");
        String rowName = qname.length > 1 ? qname[1] : qname[0];
        if (!rowName.equals(targetRow)) {
            continue;
        }
        for (Row row : entry.getValue()) {
            for (GridEvent ev : row.getEvents()) {
                String origValue = ev.getValue();
                if (targetValue == null) {
                    // unconditional template: substitute the original value into the
                    // "%%value%%" placeholder. Use the literal replace() so that '$'
                    // or '\' inside annotation values are not misinterpreted as
                    // regex replacement sequences (a bug with replaceAll()).
                    ev.setValue(template.replace("%%value%%", origValue));
                } else if (origValue.equals(targetValue)) {
                    // value-restricted template: replace the value verbatim
                    ev.setValue(template);
                }
            }
        }
    }
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling:
class RSTImpl, method appendChild.
/**
 * Attaches {@code node} to its parent in the JSON tree under construction.
 *
 * <p>If {@code currSnode} is reached via an RST-typed incoming relation, the node is
 * appended below the appropriate ancestor on the traversal stack {@code st} (or directly
 * below {@code result} when the stack holds at most one element). Otherwise it is appended
 * to the given {@code root}. JSON errors are logged, not propagated.
 *
 * @param root fallback parent when no RST edge leads to the node
 * @param node the JSON object to attach
 * @param currSnode the salt node corresponding to {@code node}
 * @return the (possibly modified) {@code node}
 */
private JSONObject appendChild(JSONObject root, JSONObject node, SNode currSnode) {
    try {
        // flipped to true as soon as currSnode turns out to be reached by an RST edge
        boolean attached = false;
        List<SRelation<SNode, SNode>> incoming =
            currSnode.getGraph().getInRelations(currSnode.getId());
        if (incoming != null) {
            for (SRelation<SNode, SNode> rel : incoming) {
                if (!hasRSTType(rel)) {
                    continue;
                }
                if (st.size() > 1) {
                    // attach to the element just below the stack top, then restore the stack
                    JSONObject top = st.pop();
                    getOrCreateArray(st.peek(), "children").put(node);
                    sortChildren(st.peek());
                    st.push(top);
                } else {
                    getOrCreateArray(result, "children").put(node);
                }
                setSentenceSpan(node, st.peek());
                attached = true;
                break;
            }
        }
        if (!attached) {
            // no RST edge found: fall back to the supplied root
            getOrCreateArray(root, "children").put(node);
            setSentenceSpan(node, root);
            sortChildren(root);
        }
    } catch (JSONException ex) {
        log.error("cannot append {}", node, ex);
    }
    return node;
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling:
class RSTImpl, method transformSaltToJSON.
/**
 * Transforms the salt document graph of the given visualizer input into the JSON
 * representation consumed by the RST renderer.
 *
 * <p>Roots belonging to the configured RST namespace are collected first; a preliminary
 * traversal then gathers and numbers all sentence segments, and a second traversal
 * (handled by this class itself) builds the JSON tree.
 *
 * @param visInput the visualizer input holding the document graph
 * @return the JSON tree serialized as a string
 */
private String transformSaltToJSON(VisualizerInput visInput) {
    graph = visInput.getSResult().getDocumentGraph();
    List<SNode> rootSNodes = graph.getRoots();
    List<SNode> rstRoots = new ArrayList<>();
    for (SNode sNode : rootSNodes) {
        if (CommonHelper.checkSLayer(namespace, sNode)) {
            rstRoots.add(sNode);
        }
    }

    // NOTE(review): this guard checks rootSNodes, while the traversals run on
    // rstRoots, which may still be empty here — confirm this asymmetry is intended.
    if (!rootSNodes.isEmpty()) {
        // collect all sentences and sort them
        graph.traverse(rstRoots, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "getSentences",
            new GraphTraverseHandler() {
                @Override
                public void nodeReached(GRAPH_TRAVERSE_TYPE traversalType, String traversalId,
                    SNode currNode, SRelation sRelation, SNode fromNode, long order) {
                    if (currNode instanceof SStructure && isSegment(currNode)) {
                        sentences.add((SStructure) currNode);
                    }
                }

                @Override
                public void nodeLeft(GRAPH_TRAVERSE_TYPE traversalType, String traversalId,
                    SNode currNode, SRelation edge, SNode fromNode, long order) {
                    // nothing to do when leaving a node
                }

                @Override
                public boolean checkConstraint(GRAPH_TRAVERSE_TYPE traversalType,
                    String traversalId, SRelation edge, SNode currNode, long order) {
                    // token are not needed
                    return !(currNode instanceof SToken);
                }
            });

        // decorate segments with their (1-based) sentence number
        int i = 1;
        for (SStructure sentence : sentences) {
            sentence.createProcessingAnnotation(SENTENCE_INDEX, SENTENCE_INDEX,
                Integer.toString(i));
            i++;
        }
    } else {
        log.debug("does not find an annotation which matched {}", ANNOTATION_KEY);
    }

    // build the JSON tree in either case (this class is the traversal handler);
    // hoisted out of the if/else where it was duplicated in both branches
    graph.traverse(rstRoots, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "jsonBuild", this);
    return result.toString();
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling:
class RSTImpl, method createJsonEntry.
/**
 * Creates the JSON entry for a single salt node.
 *
 * <p>For {@link SStructure} nodes, the directly dominated tokens are rendered into an
 * HTML "sentence" string (one colored {@code <span>} per token); segment nodes
 * additionally carry their sentence index. JSON errors are logged, not propagated.
 *
 * @param currNode the node to convert
 * @return the populated JSON object (possibly incomplete if a JSONException was logged)
 */
private JSONObject createJsonEntry(SNode currNode) {
    JSONObject jsonData = new JSONObject();
    StringBuilder sb = new StringBuilder();
    // use a hash set so we don't get any duplicate entries
    LinkedHashSet<SToken> token = new LinkedHashSet<>();
    if (currNode instanceof SStructure) {
        // collect all directly dominated tokens
        // NOTE(review): assumes getOutRelations never returns null here — confirm
        List<SRelation<SNode, SNode>> edges = currNode.getGraph().getOutRelations(currNode.getId());
        for (SRelation<SNode, SNode> sedge : edges) {
            if (sedge.getTarget() instanceof SToken) {
                token.add((SToken) sedge.getTarget());
            }
        }
        // build the sentence string, one <span> per token
        Iterator<SToken> tokIterator = token.iterator();
        while (tokIterator.hasNext()) {
            SToken tok = tokIterator.next();
            String text = getText(tok);
            String color = getHTMLColor(tok);
            if (color != null) {
                sb.append("<span style=\"color : ").append(color).append(";\">");
            } else {
                sb.append("<span>");
            }
            // separate tokens with a single space, but not after the last one
            if (tokIterator.hasNext()) {
                sb.append(text).append(" ");
            } else {
                sb.append(text);
            }
            sb.append("</span>");
        }
    }
    try {
        // build unique id, because it is used as a unique html element id
        jsonData.put("id", getUniStrId(currNode));
        jsonData.put("name", currNode.getName());
        // additional data object for edge labels and rendered sentences
        JSONObject data = new JSONObject();
        JSONArray edgesJSON = getOutGoingEdgeTypeAnnotation(currNode);
        // since we have found some tokens, it must be a sentence in RST
        if (!token.isEmpty()) {
            data.put("sentence", sb.toString());
        }
        if (edgesJSON != null) {
            data.put("edges", edgesJSON);
        }
        if (currNode instanceof SStructure && isSegment(currNode)) {
            SProcessingAnnotation sentenceIdx =
                currNode.getProcessingAnnotation(SENTENCE_INDEX + "::" + SENTENCE_INDEX);
            // -1 marks a segment that was never numbered by transformSaltToJSON
            int index = sentenceIdx == null ? -1 : Integer.parseInt(sentenceIdx.getValue_STEXT());
            data.put(SENTENCE_LEFT, index);
            data.put(SENTENCE_RIGHT, index);
        }
        jsonData.put("data", data);
    } catch (JSONException ex) {
        log.error("problems creating entry for {}", currNode, ex);
    }
    return jsonData;
}
Use of org.corpus_tools.salt.core.SNode in the ANNIS project by korpling:
class SaltAnnotateExtractor, method testAndFixNonSpan.
/**
 * Tests if the source node is not a span and fixes this if necessary.
 *
 * @param sourceNode The source node to check (may be {@code null}).
 * @param nodeByRankID map from rank id to node; updated when the node is recreated
 * @return Either the original span or a newly created one
 */
private SSpan testAndFixNonSpan(SStructuredNode sourceNode, FastInverseMap<Long, SNode> nodeByRankID) {
    // nothing to fix when there is no node or it already is a span
    if (sourceNode == null || sourceNode instanceof SSpan) {
        return (SSpan) sourceNode;
    }
    log.debug("Mismatched source type: should be SSpan");
    SNode previous = sourceNode;
    sourceNode = recreateNode(SSpan.class, sourceNode);
    // keep the rank-id lookup consistent with the recreated node
    updateMapAfterRecreatingNode(previous, sourceNode, nodeByRankID);
    return (SSpan) sourceNode;
}
Aggregations