use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.
The class SolrQueryHTTPClient, method executeQuery.
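This method turns the supplied SearchParameters into a SOLR request: paging, field list, shards, and spellcheck options are encoded on the URL, while the query itself, authorities, tenants, locales, and templates travel in a JSON body that is POSTed to the SOLR core.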
public ResultSet executeQuery(final SearchParameters searchParameters, String language) {
if (repositoryState.isBootstrapping()) {
throw new AlfrescoRuntimeException("SOLR queries can not be executed while the repository is bootstrapping");
}
try {
StoreRef store = SolrClientUtil.extractStoreRef(searchParameters);
SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(store, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
HttpClient httpClient = httpClientAndBaseUrl.getFirst();
URLCodec encoder = new URLCodec();
StringBuilder url = new StringBuilder();
url.append(httpClientAndBaseUrl.getSecond());
String languageUrlFragment = SolrClientUtil.extractLanguageFragment(languageMappings, language);
if (!url.toString().endsWith("/")) {
url.append("/");
}
url.append(languageUrlFragment);
// Send the query in JSON only
// url.append("?q=");
// url.append(encoder.encode(searchParameters.getQuery(), "UTF-8"));
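// The query string itself now travels in the JSON body (body.put("query", ...) below);
// only the response format and field list remain on the URL.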
url.append("?wt=").append(encoder.encode("json", "UTF-8"));
url.append("&fl=").append(encoder.encode("DBID,score", "UTF-8"));
// Emulate old limiting behaviour and metadata
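// Precedence: an explicit maxItems wins; failing that, an explicit FINAL_SIZE limit;
// otherwise fall back to the permission-check ceiling, capped by maximumResultsFromUnlimitedQuery.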
final LimitBy limitBy;
int maxResults = -1;
if (searchParameters.getMaxItems() >= 0) {
maxResults = searchParameters.getMaxItems();
limitBy = LimitBy.FINAL_SIZE;
} else if (searchParameters.getLimitBy() == LimitBy.FINAL_SIZE && searchParameters.getLimit() >= 0) {
maxResults = searchParameters.getLimit();
limitBy = LimitBy.FINAL_SIZE;
} else {
maxResults = searchParameters.getMaxPermissionChecks();
if (maxResults < 0) {
maxResults = maximumResultsFromUnlimitedQuery;
}
limitBy = LimitBy.NUMBER_OF_PERMISSION_EVALUATIONS;
}
url.append("&rows=").append(String.valueOf(maxResults));
if ((searchParameters.getStores().size() > 1) || (mapping.isSharded())) {
boolean requiresSeparator = false;
url.append("&shards=");
for (StoreRef storeRef : searchParameters.getStores()) {
SolrStoreMappingWrapper storeMapping = SolrClientUtil.extractMapping(storeRef, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
if (requiresSeparator) {
url.append(',');
} else {
requiresSeparator = true;
}
url.append(storeMapping.getShards());
}
}
buildUrlParameters(searchParameters, mapping.isSharded(), encoder, url);
final String searchTerm = searchParameters.getSearchTerm();
String spellCheckQueryStr = null;
if (searchTerm != null && searchParameters.isSpellCheck()) {
StringBuilder builder = new StringBuilder();
builder.append("&spellcheck.q=").append(encoder.encode(searchTerm, "UTF-8"));
builder.append("&spellcheck=").append(encoder.encode("true", "UTF-8"));
spellCheckQueryStr = builder.toString();
url.append(spellCheckQueryStr);
}
JSONObject body = new JSONObject();
body.put("query", searchParameters.getQuery());
// Authorities go over as is - and tenant mangling and query building takes place on the SOLR side
Set<String> allAuthorisations = permissionService.getAuthorisations();
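// Group authorities are omitted for users holding the administrator authority unless includeGroupsForRoleAdmin is set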
boolean includeGroups = includeGroupsForRoleAdmin ? true : !allAuthorisations.contains(PermissionService.ADMINISTRATOR_AUTHORITY);
JSONArray authorities = new JSONArray();
for (String authority : allAuthorisations) {
if (includeGroups) {
authorities.put(authority);
} else {
if (AuthorityType.getAuthorityType(authority) != AuthorityType.GROUP) {
authorities.put(authority);
}
}
}
body.put("authorities", authorities);
body.put("anyDenyDenies", anyDenyDenies);
JSONArray tenants = new JSONArray();
tenants.put(tenantService.getCurrentUserDomain());
body.put("tenants", tenants);
JSONArray locales = new JSONArray();
for (Locale currentLocale : searchParameters.getLocales()) {
locales.put(DefaultTypeConverter.INSTANCE.convert(String.class, currentLocale));
}
if (locales.length() == 0) {
locales.put(I18NUtil.getLocale());
}
body.put("locales", locales);
JSONArray templates = new JSONArray();
for (String templateName : searchParameters.getQueryTemplates().keySet()) {
JSONObject template = new JSONObject();
template.put("name", templateName);
template.put("template", searchParameters.getQueryTemplates().get(templateName));
templates.put(template);
}
body.put("templates", templates);
JSONArray allAttributes = new JSONArray();
for (String attribute : searchParameters.getAllAttributes()) {
allAttributes.put(attribute);
}
body.put("allAttributes", allAttributes);
body.put("defaultFTSOperator", searchParameters.getDefaultFTSOperator());
body.put("defaultFTSFieldOperator", searchParameters.getDefaultFTSFieldOperator());
body.put("queryConsistency", searchParameters.getQueryConsistency());
if (searchParameters.getMlAnalaysisMode() != null) {
body.put("mlAnalaysisMode", searchParameters.getMlAnalaysisMode().toString());
}
body.put("defaultNamespace", searchParameters.getNamespace());
JSONArray textAttributes = new JSONArray();
for (String attribute : searchParameters.getTextAttributes()) {
textAttributes.put(attribute);
}
body.put("textAttributes", textAttributes);
// effectively-final copy of maxResults so the lambda below can capture it
final int maximumResults = maxResults;
return (ResultSet) postSolrQuery(httpClient, url.toString(), body, json -> {
return new SolrJSONResultSet(json, searchParameters, nodeService, nodeDAO, limitBy, maximumResults);
}, spellCheckQueryStr);
} catch (UnsupportedEncodingException e) {
throw new LuceneQueryParserException("", e);
} catch (HttpException e) {
throw new LuceneQueryParserException("", e);
} catch (IOException e) {
throw new LuceneQueryParserException("", e);
} catch (JSONException e) {
throw new LuceneQueryParserException("", e);
}
}
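A minimal sketch of how a caller might drive executeQuery, assuming a wired-up SolrQueryHTTPClient bean and that SearchService.LANGUAGE_FTS_ALFRESCO is registered in languageMappings; the surrounding class, query text, and setter usage are assumptions for illustration, not part of the source above:
import java.util.List;
import org.alfresco.repo.search.impl.solr.SolrQueryHTTPClient;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchService;
public class SolrQueryExample {
    // Hypothetical caller; obtaining the SolrQueryHTTPClient bean is left to the application context.
    public static List<NodeRef> findReports(SolrQueryHTTPClient solrClient) {
        SearchParameters sp = new SearchParameters();
        sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
        sp.setQuery("TYPE:\"cm:content\" AND cm:name:report*");
        // An explicit maxItems puts the query on the LimitBy.FINAL_SIZE branch above
        sp.setMaxItems(50);
        // Both a search term and the spellcheck flag are needed for the spellcheck URL fragment
        sp.setSearchTerm("report");
        sp.setSpellCheck(true);
        ResultSet results = solrClient.executeQuery(sp, SearchService.LANGUAGE_FTS_ALFRESCO);
        try {
            return results.getNodeRefs();
        } finally {
            results.close();
        }
    }
}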
use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.
The class NodeServiceTest, method testLinkToDeletedNodeRecovery.
/**
* Test for MNT-8494 - we should be able to recover when indexing encounters a node with deleted ancestors
*/
@Test
public void testLinkToDeletedNodeRecovery() throws Throwable {
// First find any broken links to start with
final NodeEntity params = new NodeEntity();
params.setId(0L);
params.setTypeQNameId(deletedTypeQNameId);
List<Long> nodesWithDeletedParents = getChildNodesWithDeletedParentNode(params, 0);
List<Long> deletedChildren = getDeletedChildren(params, 0);
List<Long> nodesWithNoParents = getChildNodesWithNoParentNode(params, 0);
logger.debug("Found child nodes with deleted parent node (before): " + nodesWithDeletedParents);
final NodeRef[] nodeRefs = new NodeRef[10];
final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
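// buildNodeHierarchy (test helper, not shown here) fills nodeRefs with a ten-node parent-child chain under the root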
int cnt = 5;
final List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
for (int i = 0; i < cnt; i++) {
// create some pseudo-thumbnails
String randomName = this.getClass().getName() + "-" + System.nanoTime();
QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
Map<QName, Serializable> props = new HashMap<QName, Serializable>();
props.put(ContentModel.PROP_NAME, randomName);
// Choose a random parent node from the hierarchy
int random = new Random().nextInt(10);
NodeRef parentNodeRef = nodeRefs[random];
NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
childNodeRefs.add(childNodeRef);
}
// forcefully delete the root, a random connecting one, and a random leaf
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
Long nodeId = (Long) nodeService.getProperty(nodeRefs[0], ContentModel.PROP_NODE_DBID);
nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
nodeDAO.removeNodeAspects(nodeId);
nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
nodeId = (Long) nodeService.getProperty(nodeRefs[2], ContentModel.PROP_NODE_DBID);
nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
nodeDAO.removeNodeAspects(nodeId);
nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
nodeId = (Long) nodeService.getProperty(childNodeRefs.get(childNodeRefs.size() - 1), ContentModel.PROP_NODE_DBID);
nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
nodeDAO.removeNodeAspects(nodeId);
nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
return null;
}
});
// Now need to identify the problem nodes
final List<Long> childNodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
assertFalse(childNodeIds.isEmpty());
logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
// Now visit the nodes in reverse order and do indexing-like things
List<NodeRef> allNodeRefs = new ArrayList<NodeRef>(nodeRefs.length + childNodeRefs.size());
allNodeRefs.addAll(Arrays.asList(nodeRefs));
allNodeRefs.addAll(childNodeRefs);
Collections.reverse(allNodeRefs);
for (final NodeRef nodeRef : allNodeRefs) {
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
if (nodeService.exists(nodeRef)) {
try {
for (ChildAssociationRef parentRef : nodeService.getParentAssocs(nodeRef)) {
nodeService.getPath(parentRef.getParentRef());
}
// ignore return
nodeService.getPath(nodeRef);
} catch (InvalidNodeRefException e) {
throw new ConcurrencyFailureException("Deleted node - should be healed on retry", e);
}
}
return null;
}
});
}
// A second pass via the DAO, which might pick up anything the first pass healed
for (final NodeRef nodeRef : allNodeRefs) {
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
nodeDAO.getNodePair(nodeRef);
return null;
}
});
}
// Check again
List<Long> nodeIds = getDeletedChildren(params, deletedChildren.size());
assertTrue("The following deleted nodes still have parents: " + nodeIds, nodeIds.isEmpty());
nodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
assertTrue("The following child nodes have deleted parent nodes: " + nodeIds, nodeIds.isEmpty());
nodeIds = getChildNodesWithNoParentNode(params, nodesWithNoParents.size());
assertTrue("The following child nodes have no parent node: " + nodeIds, nodeIds.isEmpty());
// check lost_found ...
final List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
assertFalse(lostAndFoundNodeRefs.isEmpty());
final List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
for (NodeRef nodeRef : lostAndFoundNodeRefs) {
Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();
lostAndFoundNodeIds.add(nodeId);
}
return null;
}
});
for (final Long childNodeId : childNodeIds) {
Boolean exists = txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Boolean>() {
@Override
public Boolean execute() throws Throwable {
return nodeDAO.exists(childNodeId);
}
});
assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !exists);
}
}
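Since RetryingTransactionCallback has a single execute method, the anonymous classes above can be collapsed to lambdas on Java 8+; a minimal, behavior-equivalent sketch of the second visiting loop:
for (final NodeRef nodeRef : allNodeRefs) {
    txnService.getRetryingTransactionHelper().doInTransaction(() -> {
        // The DAO lookup still runs inside a retrying transaction, exactly as before
        nodeDAO.getNodePair(nodeRef);
        return null;
    });
}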
use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.
The class NodeServiceTest, method testForceNonRootNodeWithNoParentNode.
/**
* Pending repeatable test - force issue ALF-13066 (non-root node with no parent)
*/
@Test
public void testForceNonRootNodeWithNoParentNode() throws Throwable {
// First find any broken links to start with
final NodeEntity params = new NodeEntity();
params.setId(0L);
params.setTypeQNameId(deletedTypeQNameId);
List<Long> ids = getChildNodesWithNoParentNode(params, 0);
logger.debug("Found child nodes with deleted parent node (before): " + ids);
final int idsToSkip = ids.size();
final NodeRef[] nodeRefs = new NodeRef[10];
final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
int cnt = 5;
List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
for (int i = 0; i < cnt; i++) {
// create some pseudo-thumbnails
String randomName = this.getClass().getName() + "-" + System.nanoTime();
QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
Map<QName, Serializable> props = new HashMap<QName, Serializable>();
props.put(ContentModel.PROP_NAME, randomName);
// Choose a random parent node from the hierarchy
int random = new Random().nextInt(10);
NodeRef parentNodeRef = nodeRefs[random];
NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
childNodeRefs.add(childNodeRef);
// forcefully remove the primary parent assoc
final Long childNodeId = (Long) nodeService.getProperty(childNodeRef, ContentModel.PROP_NODE_DBID);
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
Pair<Long, ChildAssociationRef> assocPair = nodeDAO.getPrimaryParentAssoc(childNodeId);
nodeDAO.deleteChildAssoc(assocPair.getFirst());
return null;
}
});
}
// Now need to identify the problem nodes
final List<Long> childNodeIds = getChildNodesWithNoParentNode(params, idsToSkip);
assertFalse(childNodeIds.isEmpty());
logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
// workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
for (final NodeRef nodeRef : childNodeRefs) {
txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {
@Override
public Void execute() throws Throwable {
if (nodeService.exists(nodeRef)) {
// ignore return
nodeService.getPath(nodeRef);
}
return null;
}
});
}
// check again ...
ids = getChildNodesWithNoParentNode(params, idsToSkip);
assertTrue("The following child nodes have no parent node: " + ids, ids.isEmpty());
// check lost_found ...
List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
assertFalse(lostAndFoundNodeRefs.isEmpty());
List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
for (NodeRef nodeRef : lostAndFoundNodeRefs) {
lostAndFoundNodeIds.add((Long) nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
}
for (Long childNodeId : childNodeIds) {
assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !nodeDAO.exists(childNodeId));
}
}
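Both tests lean on the repository's self-healing: when a path walk or DAO lookup meets a broken hierarchy, the orphan is re-homed under the store's lost_found container. A sketch of what the getLostAndFoundNodes() helper (not shown above) might look like, reusing the test's nodeService field; the sys:lost_found naming and the two-level association walk are assumptions, not confirmed by the source:
// Hypothetical helper: assumes healed orphans land under a sys:lost_found
// node beneath the store root, which is where the assertions then look for them.
private List<NodeRef> getLostAndFoundNodes() {
    List<NodeRef> results = new ArrayList<NodeRef>();
    NodeRef rootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    QName lostFoundQName = QName.createQName(NamespaceService.SYSTEM_MODEL_1_0_URI, "lost_found");
    for (ChildAssociationRef containerAssoc : nodeService.getChildAssocs(rootNodeRef, ContentModel.ASSOC_CHILDREN, lostFoundQName)) {
        for (ChildAssociationRef orphanAssoc : nodeService.getChildAssocs(containerAssoc.getChildRef())) {
            results.add(orphanAssoc.getChildRef());
        }
    }
    return results;
}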