Use of org.structr.web.entity.dom.DOMNode in project structr by structr.
The class SaveNodeCommand, method processMessage:
@Override
public void processMessage(final WebSocketMessage webSocketData) {

    final String nodeId = webSocketData.getId();
    final Map<String, Object> nodeData = webSocketData.getNodeData();
    final String modifiedHtml = (String) nodeData.get("source");
    final SecurityContext securityContext = getWebSocket().getSecurityContext();
    final App app = StructrApp.getInstance(securityContext);

    Page modifiedNode = null;

    DOMNode sourceNode = (DOMNode) getNode(nodeId);
    if (sourceNode != null) {

        TransactionCommand.registerNodeCallback(sourceNode, callback);

        try {

            // parse page from modified source
            modifiedNode = Importer.parsePageFromSource(securityContext, modifiedHtml, "__SaveNodeCommand_Temporary_Page__");

            DOMNode targetNode = modifiedNode;
            if (!(sourceNode instanceof Page)) {

                // source node is not a page: descend into the temporary page to reach the node corresponding to sourceNode
                targetNode = (DOMNode) modifiedNode.getFirstChild().getNextSibling().getFirstChild().getNextSibling().getFirstChild();
            }

            final List<InvertibleModificationOperation> changeSet = Importer.diffNodes(sourceNode, targetNode);
            for (final InvertibleModificationOperation op : changeSet) {

                // execute operation
                op.apply(app, sourceNode.getClosestPage(), modifiedNode);
            }

        } catch (Throwable t) {

            logger.warn("", t);

            // send exception
            getWebSocket().send(MessageBuilder.status().code(422).message(t.toString()).build(), true);
        }

        // remove the temporary page, whether or not the change set could be applied
        try {

            app.delete(modifiedNode);

        } catch (FrameworkException ex) {
            logger.warn("", ex);
        }

    } else {

        // send exception
        getWebSocket().send(MessageBuilder.status().code(422).message("Cannot save page").build(), true);
    }
}
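The command above follows a parse, diff, apply, clean-up sequence: the modified markup is parsed into a temporary page, the change set between the stored node and the parsed tree is computed, each operation is applied, and the temporary page is deleted even when applying fails. The following minimal, self-contained sketch isolates that sequence; the Operation and Differ types are hypothetical stand-ins for InvertibleModificationOperation and Importer.diffNodes, not part of the structr API.

import java.util.List;
import java.util.function.Consumer;

// Hypothetical stand-ins used only for this sketch.
interface Operation { void apply() throws Exception; }
interface Differ<T> { List<Operation> diff(T existing, T modified); }

final class ParseDiffApplySketch<T> {

    // Applies the change set between an existing tree and a freshly parsed temporary tree,
    // and always hands the temporary tree to the cleanup callback afterwards.
    void saveChanges(final T existingTree, final T temporaryTree, final Differ<T> differ, final Consumer<T> cleanup) {

        try {
            // compute and apply the change set in order
            for (final Operation op : differ.diff(existingTree, temporaryTree)) {
                op.apply();
            }
        } catch (final Exception e) {
            // report the failure; in the websocket command above this becomes a 422 status message
            System.err.println("Applying change set failed: " + e);
        } finally {
            // delete the temporary tree whether or not the change set could be applied
            cleanup.accept(temporaryTree);
        }
    }
}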
Use of org.structr.web.entity.dom.DOMNode in project structr by structr.
The class InsertBeforeDOMNodeCommand, method processMessage:
@Override
public void processMessage(final WebSocketMessage webSocketData) {

    final Map<String, Object> nodeData = webSocketData.getNodeData();
    final String parentId = (String) nodeData.get("parentId");
    final String newId = (String) nodeData.get("newId");
    final String refId = (String) nodeData.get("refId");
    final String pageId = webSocketData.getPageId();

    if (pageId != null) {

        // check for parent ID before creating any nodes
        if (parentId == null) {

            getWebSocket().send(MessageBuilder.status().code(422).message("Cannot replace node without parentId").build(), true);
            return;
        }

        // check if parent node with given ID exists
        final DOMNode parentNode = getDOMNode(parentId);
        if (parentNode == null) {

            getWebSocket().send(MessageBuilder.status().code(404).message("Parent node not found").build(), true);
            return;
        }

        // check for reference ID before creating any nodes
        if (refId == null) {

            getWebSocket().send(MessageBuilder.status().code(422).message("Cannot insert node without refId").build(), true);
            return;
        }

        // check if reference node with given ID exists
        final DOMNode refNode = getDOMNode(refId);
        if (refNode == null) {

            getWebSocket().send(MessageBuilder.status().code(404).message("Reference node not found").build(), true);
            return;
        }

        // check for new ID before creating any nodes
        if (newId == null) {

            getWebSocket().send(MessageBuilder.status().code(422).message("Cannot replace node without newId").build(), true);
            return;
        }

        // check if new node with given ID exists
        final DOMNode newNode = getDOMNode(newId);
        if (newNode == null) {

            getWebSocket().send(MessageBuilder.status().code(404).message("New node not found").build(), true);
            return;
        }

        try {

            parentNode.insertBefore(newNode, refNode);

        } catch (DOMException dex) {

            // send DOM exception
            getWebSocket().send(MessageBuilder.status().code(422).message(dex.getMessage()).build(), true);
        }

    } else {

        getWebSocket().send(MessageBuilder.status().code(422).message("Cannot insert node without pageId").build(), true);
    }
}
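After validating the IDs, the command delegates to insertBefore(newNode, refNode), and the DOMNode API mirrors org.w3c.dom.Node here (a reference child as second argument, DOMException on failure). The semantics can therefore be illustrated with the plain W3C DOM API; the sketch below uses only the JDK and contains no structr types.

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class InsertBeforeDemo {

    public static void main(final String[] args) throws Exception {

        final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

        final Element parent = doc.createElement("div");
        final Element first  = doc.createElement("p");
        final Element ref    = doc.createElement("span");
        doc.appendChild(parent);
        parent.appendChild(first);
        parent.appendChild(ref);

        // insert a new element before the reference child,
        // the same call shape as parentNode.insertBefore(newNode, refNode) above
        final Element inserted = doc.createElement("em");
        parent.insertBefore(inserted, ref);

        // resulting child order: p, em, span
        System.out.println(parent.getChildNodes().getLength());              // 3
        System.out.println(parent.getChildNodes().item(1).getNodeName());    // em
    }
}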
Use of org.structr.web.entity.dom.DOMNode in project structr by structr.
The class WrapDOMNodeCommand, method processMessage:
@Override
public void processMessage(final WebSocketMessage webSocketData) {

    final Map<String, Object> nodeData = webSocketData.getNodeData();
    final String pageId = webSocketData.getPageId();
    final String nodeId = (String) nodeData.get("nodeId");
    final Boolean inheritVisibilityFlags = (Boolean) nodeData.get("inheritVisibilityFlags");

    nodeData.remove("nodeId");
    nodeData.remove("inheritVisibilityFlags");

    if (pageId != null) {

        // check for node ID before creating any nodes
        if (nodeId == null) {

            getWebSocket().send(MessageBuilder.status().code(422).message("Cannot wrap node without nodeId").build(), true);
            return;
        }

        // check if node with given ID exists
        final DOMNode oldNode = getDOMNode(nodeId);
        if (oldNode == null) {

            getWebSocket().send(MessageBuilder.status().code(404).message("Node not found").build(), true);
            return;
        }

        final Document document = getPage(pageId);
        if (document != null) {

            final String tagName = (String) nodeData.get("tagName");
            nodeData.remove("tagName");

            final DOMNode parentNode = (DOMNode) oldNode.getParentNode();
            if (parentNode == null) {

                getWebSocket().send(MessageBuilder.status().code(404).message("Node has no parent node").build(), true);
                return;
            }

            try {

                DOMNode newNode;

                if (tagName != null && "comment".equals(tagName)) {

                    newNode = (DOMNode) document.createComment("#comment");

                } else if (tagName != null && "template".equals(tagName)) {

                    newNode = (DOMNode) document.createTextNode("#template");

                    try {

                        newNode.unlockSystemPropertiesOnce();
                        newNode.setProperties(newNode.getSecurityContext(), new PropertyMap(NodeInterface.type, Template.class.getSimpleName()));

                    } catch (FrameworkException fex) {
                        logger.warn("Unable to set type of node {} to Template: {}", new Object[] { newNode.getUuid(), fex.getMessage() });
                    }

                } else if (tagName != null && !tagName.isEmpty()) {

                    newNode = (DOMNode) document.createElement(tagName);

                } else {

                    getWebSocket().send(MessageBuilder.status().code(404).message("Cannot create node without tagname").build(), true);
                    return;
                }

                // instantiate node again to get correct class
                newNode = getDOMNode(newNode.getUuid());

                // wrap: replace the old node with the new node, then append the old node as child of the new node
                if (newNode != null) {

                    parentNode.replaceChild(newNode, oldNode);
                    newNode.appendChild(oldNode);

                    if (inheritVisibilityFlags) {

                        PropertyMap visibilityFlags = new PropertyMap();
                        visibilityFlags.put(DOMNode.visibleToAuthenticatedUsers, parentNode.getProperty(DOMNode.visibleToAuthenticatedUsers));
                        visibilityFlags.put(DOMNode.visibleToPublicUsers, parentNode.getProperty(DOMNode.visibleToPublicUsers));

                        try {

                            newNode.setProperties(newNode.getSecurityContext(), visibilityFlags);

                        } catch (FrameworkException fex) {
                            logger.warn("Unable to inherit visibility flags for node {} from parent node {}", newNode, parentNode);
                        }
                    }
                }

            } catch (DOMException dex) {

                // send DOM exception
                getWebSocket().send(MessageBuilder.status().code(422).message(dex.getMessage()).build(), true);
            }

        } else {

            getWebSocket().send(MessageBuilder.status().code(404).message("Page not found").build(), true);
        }

    } else {

        getWebSocket().send(MessageBuilder.status().code(422).message("Cannot wrap node without pageId").build(), true);
    }
}
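The wrap itself is just two DOM calls: replaceChild puts the wrapper where the old node was, and appendChild moves the old node inside the wrapper. The sketch below shows those two calls with the plain W3C DOM API and leaves out the structr-specific steps (re-instantiating the node to get the correct entity class, inheriting visibility flags); it contains no structr types.

import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class WrapNodeDemo {

    public static void main(final String[] args) throws Exception {

        final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

        final Element parent  = doc.createElement("body");
        final Element oldNode = doc.createElement("p");
        doc.appendChild(parent);
        parent.appendChild(oldNode);

        // the two calls that implement "wrap": put the wrapper where the old node was,
        // then move the old node inside the wrapper
        final Element wrapper = doc.createElement("div");
        parent.replaceChild(wrapper, oldNode);
        wrapper.appendChild(oldNode);

        System.out.println(parent.getFirstChild().getNodeName());                  // div
        System.out.println(parent.getFirstChild().getFirstChild().getNodeName());  // p
    }
}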
Use of org.structr.web.entity.dom.DOMNode in project structr by structr.
The class CloudConnection, method storeNode:
public NodeInterface storeNode(final DataContainer receivedData) throws FrameworkException {

    final SecurityContext securityContext = SecurityContext.getSuperUserInstance();
    final NodeDataContainer receivedNodeData = (NodeDataContainer) receivedData;
    final String typeName = receivedNodeData.getType();
    final Class nodeType = config.getNodeEntityClass(typeName);

    if (nodeType == null) {

        logger.error("Unknown entity type {}", typeName);
        return null;
    }

    // skip builtin schema node types
    if (Boolean.TRUE.equals(receivedNodeData.getProperties().get(SchemaNode.isBuiltinType.dbName()))) {
        return null;
    }

    final String uuid = receivedNodeData.getSourceNodeId();

    GraphObject newOrExistingNode = app.get(nodeType, uuid);
    if (newOrExistingNode != null) {

        // merge properties
        newOrExistingNode.setProperties(securityContext, PropertyMap.databaseTypeToJavaType(securityContext, nodeType, receivedNodeData.getProperties()));

    } else {

        final PropertyMap properties = PropertyMap.databaseTypeToJavaType(securityContext, nodeType, receivedNodeData.getProperties());
        final List<DOMNode> existingChildren = new LinkedList<>();

        // special handling for ShadowDocument (all others must be deleted)
        if (ShadowDocument.class.getSimpleName().equals(typeName)) {

            // delete shadow document
            for (ShadowDocument existingDoc : app.nodeQuery(ShadowDocument.class).includeDeletedAndHidden().getAsList()) {

                existingChildren.addAll(existingDoc.getProperty(Page.elements));
                app.delete(existingDoc);
            }

            // add existing children to new shadow document
            properties.put(Page.elements, existingChildren);
        }

        // create node
        newOrExistingNode = app.create(nodeType, properties);
    }

    idMap.put(receivedNodeData.getSourceNodeId(), newOrExistingNode.getUuid());

    count++;
    total++;

    return (NodeInterface) newOrExistingNode;
}
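At its core this is a merge-or-create (upsert) keyed by the transmitted UUID: if a node with that UUID exists, the received properties are merged into it, otherwise a new node is created from them. The sketch below shows the same decision with a plain map standing in for the database; the Node and UpsertSketch types are hypothetical and not part of the structr API, which uses app.get, app.create and PropertyMap.databaseTypeToJavaType as shown above.

import java.util.HashMap;
import java.util.Map;

final class UpsertSketch {

    // hypothetical in-memory stand-in for a persisted node
    static final class Node {
        final Map<String, Object> properties = new HashMap<>();
    }

    private final Map<String, Node> store = new HashMap<>();

    Node storeNode(final String uuid, final Map<String, Object> receivedProperties) {

        Node node = store.get(uuid);
        if (node != null) {

            // merge received properties into the existing node
            node.properties.putAll(receivedProperties);

        } else {

            // create a new node with the received properties
            node = new Node();
            node.properties.putAll(receivedProperties);
            store.put(uuid, node);
        }

        return node;
    }
}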
Use of org.structr.web.entity.dom.DOMNode in project structr by structr.
The class Importer, method diffNodes:
public static List<InvertibleModificationOperation> diffNodes(final DOMNode sourceNode, final DOMNode modifiedNode) {

    if (sourceNode == null) {

        logger.warn("Source node was null, returning empty change set.");
        return Collections.EMPTY_LIST;
    }

    if (modifiedNode == null) {

        logger.warn("Modified node was null, returning empty change set.");
        return Collections.EMPTY_LIST;
    }

    final List<InvertibleModificationOperation> changeSet = new LinkedList<>();
    final Map<String, DOMNode> indexMappedExistingNodes = new LinkedHashMap<>();
    final Map<String, DOMNode> hashMappedExistingNodes = new LinkedHashMap<>();
    final Map<DOMNode, Integer> depthMappedExistingNodes = new LinkedHashMap<>();
    final Map<String, DOMNode> indexMappedNewNodes = new LinkedHashMap<>();
    final Map<String, DOMNode> hashMappedNewNodes = new LinkedHashMap<>();
    final Map<DOMNode, Integer> depthMappedNewNodes = new LinkedHashMap<>();

    InvertibleModificationOperation.collectNodes(sourceNode, indexMappedExistingNodes, hashMappedExistingNodes, depthMappedExistingNodes);
    InvertibleModificationOperation.collectNodes(modifiedNode, indexMappedNewNodes, hashMappedNewNodes, depthMappedNewNodes);

    // iterate over existing nodes and try to find deleted ones
    for (final Iterator<Map.Entry<String, DOMNode>> it = hashMappedExistingNodes.entrySet().iterator(); it.hasNext(); ) {

        final Map.Entry<String, DOMNode> existingNodeEntry = it.next();
        final DOMNode existingNode = existingNodeEntry.getValue();
        final String existingHash = existingNode.getIdHash();

        // check for deleted nodes, ignoring Page nodes
        if (!hashMappedNewNodes.containsKey(existingHash) && !(existingNode instanceof Page)) {

            changeSet.add(new DeleteOperation(hashMappedExistingNodes, existingNode));
        }
    }

    // iterate over new nodes and try to find created ones
    for (final Iterator<Map.Entry<String, DOMNode>> it = indexMappedNewNodes.entrySet().iterator(); it.hasNext(); ) {

        final Map.Entry<String, DOMNode> newNodeEntry = it.next();
        final DOMNode newNode = newNodeEntry.getValue();

        // if newNode is a content element, do not rely on local hash property
        String newHash = newNode.getDataHash();
        if (newHash == null) {
            newHash = newNode.getIdHash();
        }

        // check for created nodes, ignoring Page nodes
        if (!hashMappedExistingNodes.containsKey(newHash) && !(newNode instanceof Page)) {

            final DOMNode newParent = newNode.getParent();
            changeSet.add(new CreateOperation(hashMappedExistingNodes, getHashOrNull(newParent), getSiblingHashes(newNode), newNode, depthMappedNewNodes.get(newNode)));
        }
    }

    // compare all new nodes with all existing nodes
    for (final Map.Entry<String, DOMNode> newNodeEntry : indexMappedNewNodes.entrySet()) {

        final String newTreeIndex = newNodeEntry.getKey();
        final DOMNode newNode = newNodeEntry.getValue();

        for (final Map.Entry<String, DOMNode> existingNodeEntry : indexMappedExistingNodes.entrySet()) {

            final String existingTreeIndex = existingNodeEntry.getKey();
            final DOMNode existingNode = existingNodeEntry.getValue();
            DOMNode newParent = null;
            int equalityBitmask = 0;

            if (newTreeIndex.equals(existingTreeIndex)) {
                equalityBitmask |= 1;
            }

            if (newNode.getIdHashOrProperty().equals(existingNode.getIdHash())) {
                equalityBitmask |= 2;
            }

            if (newNode.contentEquals(existingNode)) {
                equalityBitmask |= 4;
            }

            switch (equalityBitmask) {

                case 7:
                    // same tree index (1), same node (2), same content (4) => node is completely unmodified
                    break;

                case 6:
                    // NOT same tree index, same node (2), same content (4) => node has moved
                    newParent = newNode.getParent();
                    changeSet.add(new MoveOperation(hashMappedExistingNodes, getHashOrNull(newParent), getSiblingHashes(newNode), newNode, existingNode));
                    break;

                case 5:
                    // same tree index (1), NOT same node, same content (4) => node was deleted and restored, maybe the identification information was lost
                    break;

                case 4:
                    // NOT same tree index, NOT same node, same content (4) => different node, content is equal by chance?
                    break;

                case 3:
                    // same tree index (1), same node (2), NOT same content => node was modified but not moved
                    changeSet.add(new UpdateOperation(hashMappedExistingNodes, existingNode, newNode));
                    break;

                case 2:
                    // NOT same tree index, same node (2), NOT same content => node was moved and changed
                    newParent = newNode.getParent();
                    changeSet.add(new UpdateOperation(hashMappedExistingNodes, existingNode, newNode));
                    changeSet.add(new MoveOperation(hashMappedExistingNodes, getHashOrNull(newParent), getSiblingHashes(newNode), newNode, existingNode));
                    break;

                case 1:
                    // same tree index (1), NOT same node, NOT same content => ignore
                    break;

                case 0:
                    // NOT same tree index, NOT same node, NOT same content => ignore
                    break;
            }
        }
    }

    return changeSet;
}
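The three comparisons are combined into a bitmask: bit 1 means same tree index, bit 2 same node (id hash), bit 4 same content. For example, a node that kept its hash and content but sits at a different position yields 2 | 4 = 6 and produces a MoveOperation. The sketch below is a simplified, self-contained classifier that mirrors the switch above; the class and method names are made up for the example and are not structr code.

// Worked example of the equality bitmask used in diffNodes():
// bit 1 = same tree index, bit 2 = same node (id hash), bit 4 = same content.
public class EqualityBitmaskDemo {

    static String classify(final boolean sameTreeIndex, final boolean sameNode, final boolean sameContent) {

        int equalityBitmask = 0;
        if (sameTreeIndex) { equalityBitmask |= 1; }
        if (sameNode)      { equalityBitmask |= 2; }
        if (sameContent)   { equalityBitmask |= 4; }

        switch (equalityBitmask) {
            case 7:  return "unmodified";             // 1 + 2 + 4
            case 6:  return "moved";                  // 2 + 4, different tree index
            case 3:  return "modified in place";      // 1 + 2, different content
            case 2:  return "moved and modified";     // 2 only
            default: return "ignored";                // 0, 1, 4, 5: no operation is generated
        }
    }

    public static void main(final String[] args) {
        System.out.println(classify(false, true, true));   // moved
        System.out.println(classify(true,  true, false));  // modified in place
    }
}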