use of org.alfresco.repo.domain.node.NodeEntity in project alfresco-repository by Alfresco.
the class NodeServiceTest method testConcurrentLinkToDeletedNode.
/**
* Ensure that nodes cannot be linked to deleted nodes.
* <p/>
* Conditions that <i>might</i> cause this are:<br/>
* <ul>
* <li>Node created within a parent node that is being deleted</li>
* <li>The node cache is temporarily incorrect when the association is made</li>
* </ul>
* <p/>
* <a href="https://issues.alfresco.com/jira/browse/ALF-12358">Concurrency: Possible to create association references to deleted nodes</a>
*/
@Test
public void testConcurrentLinkToDeletedNode() throws Throwable {
    // Snapshot any pre-existing broken links so the assertions below only
    // consider damage introduced by this test run.
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);
    // Find all 'at risk' nodes before the test
    final List<Long> attachedToDeletedIdsBefore = getChildNodesWithDeletedParentNode(params, 0);
    logger.debug("Found child nodes with deleted parent node (before): " + attachedToDeletedIdsBefore);
    final List<Long> orphanedNodeIdsBefore = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes without parent (before): " + orphanedNodeIdsBefore);
    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
    // Fire off a bunch of threads that create random nodes within the hierarchy created above
    final RetryingTransactionCallback<NodeRef> createChildCallback = new RetryingTransactionCallback<NodeRef>() {
        @Override
        public NodeRef execute() throws Throwable {
            String randomName = this.getClass().getName() + "-" + GUID.generate();
            QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
            Map<QName, Serializable> props = new HashMap<QName, Serializable>();
            props.put(ContentModel.PROP_NAME, randomName);
            // Choose a random parent node from the hierarchy
            int random = new Random().nextInt(10);
            return nodeService.createNode(nodeRefs[random], ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_CONTAINER, props).getChildRef();
        }
    };
    final Runnable[] runnables = new Runnable[20];
    final List<NodeRef> nodesAtRisk = Collections.synchronizedList(new ArrayList<NodeRef>(100));
    final List<Thread> threads = new ArrayList<Thread>();
    for (int i = 0; i < runnables.length; i++) {
        runnables[i] = new Runnable() {
            @Override
            public synchronized void run() {
                AuthenticationUtil.setRunAsUserSystem();
                try {
                    // A short wait before we kick off (should be notified by the deleting txn below)
                    wait(1000L);
                    for (int i = 0; i < 100; i++) {
                        NodeRef nodeRef = txnService.getRetryingTransactionHelper().doInTransaction(createChildCallback);
                        // Store the node for later checks
                        nodesAtRisk.add(nodeRef);
                        // Wait to give other threads a chance
                        wait(1L);
                    }
                } catch (Throwable e) {
                    // This is expected i.e. we'll just keep doing it until failure
                    logger.debug("Got exception adding child node: ", e);
                }
            }
        };
        Thread thread = new Thread(runnables[i]);
        threads.add(thread);
        thread.start();
    }
    final RetryingTransactionCallback<NodeRef> deleteWithNestedCallback = new RetryingTransactionCallback<NodeRef>() {
        @Override
        public NodeRef execute() throws Throwable {
            // Notify the threads to kick off
            for (int i = 0; i < runnables.length; i++) {
                // Notify the threads to stop waiting
                synchronized (runnables[i]) {
                    runnables[i].notify();
                }
                // Short wait to give thread a chance to run
                synchronized (this) {
                    try {
                        wait(10L);
                    } catch (Throwable e) {
                        // Ignore: this wait is purely a pacing delay
                    }
                }
            }
            // add the Temporary aspect to make the deletion faster (it will not be moved to the archival store)
            nodeService.addAspect(nodeRefs[0], ContentModel.ASPECT_TEMPORARY, null);
            // Delete the parent node
            nodeService.deleteNode(nodeRefs[0]);
            return null;
        }
    };
    txnService.getRetryingTransactionHelper().doInTransaction(deleteWithNestedCallback);
    // Wait for the threads to finish
    for (Thread t : threads) {
        t.join();
    }
    logger.info("All threads should have finished");
    // Find all 'at risk' nodes after the test
    final List<Long> attachedToDeletedIdsAfter = getChildNodesWithDeletedParentNode(params, attachedToDeletedIdsBefore.size());
    logger.debug("Found child nodes with deleted parent node (after): " + attachedToDeletedIdsAfter);
    final List<Long> orphanedNodeIdsAfter = getChildNodesWithNoParentNode(params, orphanedNodeIdsBefore.size());
    logger.debug("Found child nodes without parent (after): " + orphanedNodeIdsAfter);
    if (attachedToDeletedIdsAfter.isEmpty() && orphanedNodeIdsAfter.isEmpty()) {
        // nothing more to test
        return;
    }
    // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
    for (final NodeRef nodeRef : nodesAtRisk) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    // ignore return; the path walk triggers the node-hierarchy healing
                    nodeService.getPath(nodeRef);
                }
                return null;
            }
        });
    }
    // Find all 'at risk' nodes after the recovery pass
    final List<Long> attachedToDeletedIdsCleaned = getChildNodesWithDeletedParentNode(params, attachedToDeletedIdsBefore.size());
    logger.debug("Found child nodes with deleted parent node (cleaned): " + attachedToDeletedIdsCleaned);
    final List<Long> orphanedNodeIdsCleaned = getChildNodesWithNoParentNode(params, orphanedNodeIdsBefore.size());
    logger.debug("Found child nodes without parent (cleaned): " + orphanedNodeIdsCleaned);
    // Check
    assertTrue("Expected full cleanup of nodes referencing deleted nodes: " + attachedToDeletedIdsCleaned, attachedToDeletedIdsCleaned.isEmpty());
    assertTrue("Expected full cleanup of nodes referencing without parents: " + orphanedNodeIdsCleaned, orphanedNodeIdsCleaned.isEmpty());
    // check lost_found ...
    List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());
    Set<Long> lostAndFoundNodeIds = new HashSet<Long>(lostAndFoundNodeRefs.size());
    for (NodeRef nodeRef : lostAndFoundNodeRefs) {
        lostAndFoundNodeIds.add((Long) nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
    }
    assertTrue("Nodes linked to deleted parent nodes not handled.", lostAndFoundNodeIds.containsAll(attachedToDeletedIdsAfter));
    assertTrue("Orphaned nodes not all handled.", lostAndFoundNodeIds.containsAll(orphanedNodeIdsAfter));
    // Now fail because we allowed the situation in the first place
    fail("We allowed orphaned nodes or nodes with deleted parents.");
}
use of org.alfresco.repo.domain.node.NodeEntity in project alfresco-repository by Alfresco.
the class NodeServiceTest method testLinkToDeletedNodeRecovery.
/**
* Test for MNT-8494 - we should be able to recover when indexing encounters a node with deleted ancestors
*/
@Test
public void testLinkToDeletedNodeRecovery() throws Throwable {
    // First find any broken links to start with, so the checks below can skip them
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);
    List<Long> nodesWithDeletedParents = getChildNodesWithDeletedParentNode(params, 0);
    List<Long> deletedChildren = getDeletedChildren(params, 0);
    List<Long> nodesWithNoParents = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes with deleted parent node (before): " + nodesWithDeletedParents);
    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
    int cnt = 5;
    final List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
    final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
    // Reuse a single Random rather than constructing one per iteration
    Random random = new Random();
    for (int i = 0; i < cnt; i++) {
        // create some pseudo-thumbnails
        String randomName = this.getClass().getName() + "-" + System.nanoTime();
        QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
        Map<QName, Serializable> props = new HashMap<QName, Serializable>();
        props.put(ContentModel.PROP_NAME, randomName);
        // Choose a random parent node from the hierarchy
        NodeRef parentNodeRef = nodeRefs[random.nextInt(10)];
        NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
        childNodeRefs.add(childNodeRef);
    }
    // forcefully delete the root, a random connecting one, and a random leaf
    txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
        @Override
        public Void execute() throws Throwable {
            NodeRef[] refsToBreak = new NodeRef[] { nodeRefs[0], nodeRefs[2], childNodeRefs.get(childNodeRefs.size() - 1) };
            for (NodeRef nodeRefToBreak : refsToBreak) {
                Long nodeId = (Long) nodeService.getProperty(nodeRefToBreak, ContentModel.PROP_NODE_DBID);
                // Mark the node as deleted and strip its aspects/properties directly via the
                // DAO, bypassing the NodeService integrity checks to simulate the corruption
                nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
                nodeDAO.removeNodeAspects(nodeId);
                nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
            }
            return null;
        }
    });
    // Now need to identify the problem nodes
    final List<Long> childNodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
    assertFalse(childNodeIds.isEmpty());
    logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
    // Now visit the nodes in reverse order and do indexing-like things
    List<NodeRef> allNodeRefs = new ArrayList<NodeRef>(nodeRefs.length + childNodeRefs.size());
    allNodeRefs.addAll(Arrays.asList(nodeRefs));
    allNodeRefs.addAll(childNodeRefs);
    Collections.reverse(allNodeRefs);
    for (final NodeRef nodeRef : allNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    try {
                        for (ChildAssociationRef parentRef : nodeService.getParentAssocs(nodeRef)) {
                            nodeService.getPath(parentRef.getParentRef());
                        }
                        // ignore return
                        nodeService.getPath(nodeRef);
                    } catch (InvalidNodeRefException e) {
                        // Convert to a retryable exception so the healing kicks in on retry
                        throw new ConcurrencyFailureException("Deleted node - should be healed on retry", e);
                    }
                }
                return null;
            }
        });
    }
    // Touch every node again so any healed state is loaded fresh
    for (final NodeRef nodeRef : allNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                nodeDAO.getNodePair(nodeRef);
                return null;
            }
        });
    }
    // Check again
    List<Long> nodeIds = getDeletedChildren(params, deletedChildren.size());
    assertTrue("The following deleted nodes still have parents: " + nodeIds, nodeIds.isEmpty());
    nodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
    assertTrue("The following child nodes have deleted parent nodes: " + nodeIds, nodeIds.isEmpty());
    nodeIds = getChildNodesWithNoParentNode(params, nodesWithNoParents.size());
    assertTrue("The following child nodes have no parent node: " + nodeIds, nodeIds.isEmpty());
    // check lost_found ...
    final List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());
    final List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
    txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
        @Override
        public Void execute() throws Throwable {
            for (NodeRef nodeRef : lostAndFoundNodeRefs) {
                Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();
                lostAndFoundNodeIds.add(nodeId);
            }
            return null;
        }
    });
    // Every problem node must either have been moved to lost_found or fully removed
    for (final Long childNodeId : childNodeIds) {
        Boolean exists = txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Boolean>() {
            @Override
            public Boolean execute() throws Throwable {
                return nodeDAO.exists(childNodeId);
            }
        });
        assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !exists);
    }
}
use of org.alfresco.repo.domain.node.NodeEntity in project alfresco-repository by Alfresco.
the class NodeServiceTest method testForceNonRootNodeWithNoParentNode.
/**
* Pending repeatable test - force issue ALF-13066 (non-root node with no parent)
*/
@Test
public void testForceNonRootNodeWithNoParentNode() throws Throwable {
    // First find any broken links to start with, so the checks below can skip them
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);
    List<Long> ids = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes with no parent node (before): " + ids);
    final int idsToSkip = ids.size();
    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
    int cnt = 5;
    List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
    final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
    // Reuse a single Random rather than constructing one per iteration
    Random random = new Random();
    for (int i = 0; i < cnt; i++) {
        // create some pseudo-thumbnails
        String randomName = this.getClass().getName() + "-" + System.nanoTime();
        QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
        Map<QName, Serializable> props = new HashMap<QName, Serializable>();
        props.put(ContentModel.PROP_NAME, randomName);
        // Choose a random parent node from the hierarchy
        NodeRef parentNodeRef = nodeRefs[random.nextInt(10)];
        NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
        childNodeRefs.add(childNodeRef);
        // forcefully remove the primary parent assoc, leaving an orphaned non-root node
        final Long childNodeId = (Long) nodeService.getProperty(childNodeRef, ContentModel.PROP_NODE_DBID);
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                Pair<Long, ChildAssociationRef> assocPair = nodeDAO.getPrimaryParentAssoc(childNodeId);
                nodeDAO.deleteChildAssoc(assocPair.getFirst());
                return null;
            }
        });
    }
    // Now need to identify the problem nodes
    final List<Long> childNodeIds = getChildNodesWithNoParentNode(params, idsToSkip);
    assertFalse(childNodeIds.isEmpty());
    logger.debug("Found child nodes with no parent node (after): " + childNodeIds);
    // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
    for (final NodeRef nodeRef : childNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    // ignore return; the path walk triggers the node-hierarchy healing
                    nodeService.getPath(nodeRef);
                }
                return null;
            }
        });
    }
    // check again ...
    ids = getChildNodesWithNoParentNode(params, idsToSkip);
    assertTrue("The following child nodes have no parent node: " + ids, ids.isEmpty());
    // check lost_found ...
    List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());
    List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
    for (NodeRef nodeRef : lostAndFoundNodeRefs) {
        lostAndFoundNodeIds.add((Long) nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
    }
    // Every orphaned node must either have been moved to lost_found or fully removed
    for (Long childNodeId : childNodeIds) {
        assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !nodeDAO.exists(childNodeId));
    }
}
use of org.alfresco.repo.domain.node.NodeEntity in project alfresco-repository by Alfresco.
the class NodeDAOImpl method deleteNodeAssoc.
@Override
protected int deleteNodeAssoc(Long sourceNodeId, Long targetNodeId, Long assocTypeQNameId) {
    // Build the source/target stub entities carrying only the IDs the statement needs
    NodeEntity source = new NodeEntity();
    source.setId(sourceNodeId);
    NodeEntity target = new NodeEntity();
    target.setId(targetNodeId);
    // Assemble the example object used as the DELETE statement parameter
    NodeAssocEntity deleteParams = new NodeAssocEntity();
    deleteParams.setTypeQNameId(assocTypeQNameId);
    deleteParams.setSourceNode(source);
    deleteParams.setTargetNode(target);
    // Returns the number of rows removed
    return template.delete(DELETE_NODE_ASSOC, deleteParams);
}
use of org.alfresco.repo.domain.node.NodeEntity in project alfresco-repository by Alfresco.
the class NodeDAOImpl method selectParentAssocs.
@Override
protected void selectParentAssocs(Long childNodeId, QName assocTypeQName, QName assocQName, Boolean isPrimary, ChildAssocRefQueryCallback resultsCallback) {
    // Example object for the query: the child node is the only mandatory filter
    ChildAssocEntity queryParams = new ChildAssocEntity();
    NodeEntity child = new NodeEntity();
    child.setId(childNodeId);
    queryParams.setChildNode(child);
    // Resolve the association type QName to its ID; an unknown QName cannot match
    // any row, so finish the callback immediately without hitting the database.
    if (assocTypeQName != null && !queryParams.setTypeQNameAll(qnameDAO, assocTypeQName, false)) {
        resultsCallback.done();
        return;
    }
    // Same shortcut for the association QName filter
    if (assocQName != null && !queryParams.setQNameAll(qnameDAO, assocQName, false)) {
        resultsCallback.done();
        return;
    }
    // Optional primary-association filter
    if (isPrimary != null) {
        queryParams.setPrimary(isPrimary);
    }
    // Stream results through the handler, then signal completion
    ChildAssocResultHandler handler = new ChildAssocResultHandler(resultsCallback);
    template.select(SELECT_PARENT_ASSOCS_OF_CHILD, queryParams, handler);
    resultsCallback.done();
}
Aggregations