Example 1 with NodeDAO

Use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.

The class SolrQueryHTTPClient, method executeQuery.

public ResultSet executeQuery(final SearchParameters searchParameters, String language) {
    if (repositoryState.isBootstrapping()) {
        throw new AlfrescoRuntimeException("SOLR queries can not be executed while the repository is bootstrapping");
    }
    try {
        StoreRef store = SolrClientUtil.extractStoreRef(searchParameters);
        SolrStoreMappingWrapper mapping = SolrClientUtil.extractMapping(store, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
        Pair<HttpClient, String> httpClientAndBaseUrl = mapping.getHttpClientAndBaseUrl();
        HttpClient httpClient = httpClientAndBaseUrl.getFirst();
        URLCodec encoder = new URLCodec();
        StringBuilder url = new StringBuilder();
        url.append(httpClientAndBaseUrl.getSecond());
        String languageUrlFragment = SolrClientUtil.extractLanguageFragment(languageMappings, language);
        if (!url.toString().endsWith("/")) {
            url.append("/");
        }
        url.append(languageUrlFragment);
        // Send the query in JSON only
        // url.append("?q=");
        // url.append(encoder.encode(searchParameters.getQuery(), "UTF-8"));
        url.append("?wt=").append(encoder.encode("json", "UTF-8"));
        url.append("&fl=").append(encoder.encode("DBID,score", "UTF-8"));
        // Emulate old limiting behaviour and metadata
        final LimitBy limitBy;
        int maxResults = -1;
        if (searchParameters.getMaxItems() >= 0) {
            maxResults = searchParameters.getMaxItems();
            limitBy = LimitBy.FINAL_SIZE;
        } else if (searchParameters.getLimitBy() == LimitBy.FINAL_SIZE && searchParameters.getLimit() >= 0) {
            maxResults = searchParameters.getLimit();
            limitBy = LimitBy.FINAL_SIZE;
        } else {
            maxResults = searchParameters.getMaxPermissionChecks();
            if (maxResults < 0) {
                maxResults = maximumResultsFromUnlimitedQuery;
            }
            limitBy = LimitBy.NUMBER_OF_PERMISSION_EVALUATIONS;
        }
        url.append("&rows=").append(String.valueOf(maxResults));
        if ((searchParameters.getStores().size() > 1) || (mapping.isSharded())) {
            boolean requiresSeparator = false;
            url.append("&shards=");
            for (StoreRef storeRef : searchParameters.getStores()) {
                SolrStoreMappingWrapper storeMapping = SolrClientUtil.extractMapping(storeRef, mappingLookup, shardRegistry, useDynamicShardRegistration, beanFactory);
                if (requiresSeparator) {
                    url.append(',');
                } else {
                    requiresSeparator = true;
                }
                url.append(storeMapping.getShards());
            }
        }
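        // buildUrlParameters (defined elsewhere in this class) is expected to append the
        // remaining URL parameters, such as facet, highlighting, stats and sort options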
        buildUrlParameters(searchParameters, mapping.isSharded(), encoder, url);
        final String searchTerm = searchParameters.getSearchTerm();
        String spellCheckQueryStr = null;
        if (searchTerm != null && searchParameters.isSpellCheck()) {
            StringBuilder builder = new StringBuilder();
            builder.append("&spellcheck.q=").append(encoder.encode(searchTerm, "UTF-8"));
            builder.append("&spellcheck=").append(encoder.encode("true", "UTF-8"));
            spellCheckQueryStr = builder.toString();
            url.append(spellCheckQueryStr);
        }
        JSONObject body = new JSONObject();
        body.put("query", searchParameters.getQuery());
        // Authorities are sent as-is; tenant mangling and query building take place on the SOLR side
        Set<String> allAuthorisations = permissionService.getAuthorisations();
        boolean includeGroups = includeGroupsForRoleAdmin || !allAuthorisations.contains(PermissionService.ADMINISTRATOR_AUTHORITY);
        JSONArray authorities = new JSONArray();
        for (String authority : allAuthorisations) {
            // GROUP authorities are skipped for administrators unless includeGroupsForRoleAdmin is set
            if (includeGroups || AuthorityType.getAuthorityType(authority) != AuthorityType.GROUP) {
                authorities.put(authority);
            }
        }
        body.put("authorities", authorities);
        body.put("anyDenyDenies", anyDenyDenies);
        JSONArray tenants = new JSONArray();
        tenants.put(tenantService.getCurrentUserDomain());
        body.put("tenants", tenants);
        JSONArray locales = new JSONArray();
        for (Locale currentLocale : searchParameters.getLocales()) {
            locales.put(DefaultTypeConverter.INSTANCE.convert(String.class, currentLocale));
        }
        if (locales.length() == 0) {
            locales.put(I18NUtil.getLocale());
        }
        body.put("locales", locales);
        JSONArray templates = new JSONArray();
        for (String templateName : searchParameters.getQueryTemplates().keySet()) {
            JSONObject template = new JSONObject();
            template.put("name", templateName);
            template.put("template", searchParameters.getQueryTemplates().get(templateName));
            templates.put(template);
        }
        body.put("templates", templates);
        JSONArray allAttributes = new JSONArray();
        for (String attribute : searchParameters.getAllAttributes()) {
            allAttributes.put(attribute);
        }
        body.put("allAttributes", allAttributes);
        body.put("defaultFTSOperator", searchParameters.getDefaultFTSOperator());
        body.put("defaultFTSFieldOperator", searchParameters.getDefaultFTSFieldOperator());
        body.put("queryConsistency", searchParameters.getQueryConsistency());
        if (searchParameters.getMlAnalaysisMode() != null) {
            body.put("mlAnalaysisMode", searchParameters.getMlAnalaysisMode().toString());
        }
        body.put("defaultNamespace", searchParameters.getNamespace());
        JSONArray textAttributes = new JSONArray();
        for (String attribute : searchParameters.getTextAttributes()) {
            textAttributes.put(attribute);
        }
        body.put("textAttributes", textAttributes);
        // Effectively-final copy of maxResults so it can be captured by the lambda below
        final int maximumResults = maxResults;
        return (ResultSet) postSolrQuery(httpClient, url.toString(), body, json -> {
            return new SolrJSONResultSet(json, searchParameters, nodeService, nodeDAO, limitBy, maximumResults);
        }, spellCheckQueryStr);
    } catch (IOException | JSONException e) {
        // UnsupportedEncodingException and HttpException are both subclasses of IOException
        throw new LuceneQueryParserException("", e);
    }
}
Also used : Locale(java.util.Locale) SolrJsonProcessor(org.alfresco.repo.search.impl.lucene.SolrJsonProcessor) RangeParameters(org.alfresco.service.cmr.search.RangeParameters) URIException(org.apache.commons.httpclient.URIException) StringUtils(org.apache.commons.lang3.StringUtils) Header(org.apache.commons.httpclient.Header) DefaultTypeConverter(org.alfresco.service.cmr.repository.datatype.DefaultTypeConverter) JSONResult(org.alfresco.repo.search.impl.lucene.JSONResult) JSONException(org.json.JSONException) JSONObject(org.json.JSONObject) PermissionService(org.alfresco.service.cmr.security.PermissionService) RepositoryState(org.alfresco.repo.admin.RepositoryState) NodeDAO(org.alfresco.repo.domain.node.NodeDAO) Map(java.util.Map) NodeService(org.alfresco.service.cmr.repository.NodeService) HttpStatus(org.apache.commons.httpclient.HttpStatus) ResultSet(org.alfresco.service.cmr.search.ResultSet) StatsParameters(org.alfresco.service.cmr.search.StatsParameters) QueryParserUtils(org.alfresco.repo.search.impl.QueryParserUtils) Set(java.util.Set) ShardRegistry(org.alfresco.repo.index.shard.ShardRegistry) Reader(java.io.Reader) CMISStrictDictionaryService(org.alfresco.opencmis.dictionary.CMISStrictDictionaryService) GetMethod(org.apache.commons.httpclient.methods.GetMethod) List(java.util.List) IntervalSet(org.alfresco.service.cmr.search.IntervalSet) I18NUtil(org.springframework.extensions.surf.util.I18NUtil) HttpClient(org.apache.commons.httpclient.HttpClient) Entry(java.util.Map.Entry) Optional(java.util.Optional) LogFactory(org.apache.commons.logging.LogFactory) URI(org.apache.commons.httpclient.URI) UnsupportedEncodingException(java.io.UnsupportedEncodingException) LimitBy(org.alfresco.service.cmr.search.LimitBy) LuceneQueryParserException(org.alfresco.repo.search.impl.lucene.LuceneQueryParserException) Interval(org.alfresco.service.cmr.search.Interval) FieldFacet(org.alfresco.service.cmr.search.SearchParameters.FieldFacet) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) FieldFacetSort(org.alfresco.service.cmr.search.SearchParameters.FieldFacetSort) NamespaceDAO(org.alfresco.repo.dictionary.NamespaceDAO) ParameterCheck(org.alfresco.util.ParameterCheck) StatsRequestParameters(org.alfresco.service.cmr.search.StatsRequestParameters) HttpException(org.apache.commons.httpclient.HttpException) AuthorityType(org.alfresco.service.cmr.security.AuthorityType) StoreRef(org.alfresco.service.cmr.repository.StoreRef) Iterator(java.util.Iterator) FieldFacetMethod(org.alfresco.service.cmr.search.SearchParameters.FieldFacetMethod) SearchDateConversion.parseDateInterval(org.alfresco.util.SearchDateConversion.parseDateInterval) FieldHighlightParameters(org.alfresco.service.cmr.search.FieldHighlightParameters) HttpServletResponse(javax.servlet.http.HttpServletResponse) JSONTokener(org.json.JSONTokener) Pair(org.alfresco.util.Pair) IOException(java.io.IOException) AlfrescoRuntimeException(org.alfresco.error.AlfrescoRuntimeException) BeansException(org.springframework.beans.BeansException) InputStreamReader(java.io.InputStreamReader) TenantService(org.alfresco.repo.tenant.TenantService) BasicSearchParameters(org.alfresco.service.cmr.search.BasicSearchParameters) PermissionEvaluationMode(org.alfresco.service.cmr.search.PermissionEvaluationMode) DictionaryService(org.alfresco.service.cmr.dictionary.DictionaryService) SortDefinition(org.alfresco.service.cmr.search.SearchParameters.SortDefinition) DataTypeDefinition(org.alfresco.service.cmr.dictionary.DataTypeDefinition) SolrStatsResult(org.alfresco.repo.search.impl.lucene.SolrStatsResult) URLCodec(org.apache.commons.codec.net.URLCodec) StringJoiner(java.util.StringJoiner) BeanFactory(org.springframework.beans.factory.BeanFactory) PropertyCheck(org.alfresco.util.PropertyCheck) Log(org.apache.commons.logging.Log) BufferedReader(java.io.BufferedReader) Floc(org.alfresco.repo.index.shard.Floc) SearchParameters(org.alfresco.service.cmr.search.SearchParameters) PropertyDefinition(org.alfresco.service.cmr.dictionary.PropertyDefinition) SolrJSONResultSet(org.alfresco.repo.search.impl.lucene.SolrJSONResultSet) JSONArray(org.json.JSONArray)
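
For orientation, here is a minimal usage sketch of executeQuery. It is hedged: the solrQueryHTTPClient variable and the "afts" language key are illustrative assumptions, not taken from the code above.

import org.alfresco.service.cmr.repository.StoreRef;
import org.alfresco.service.cmr.search.ResultSet;
import org.alfresco.service.cmr.search.ResultSetRow;
import org.alfresco.service.cmr.search.SearchParameters;

SearchParameters sp = new SearchParameters();
// A single store keeps the query on one core; multiple stores trigger the &shards= branch above
sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
sp.setQuery("TYPE:\"cm:content\" AND cm:name:report*");
// A non-negative value selects the LimitBy.FINAL_SIZE branch above
sp.setMaxItems(50);
ResultSet results = solrQueryHTTPClient.executeQuery(sp, "afts");
try {
    for (ResultSetRow row : results) {
        System.out.println(row.getNodeRef() + " score=" + row.getScore());
    }
} finally {
    // The ResultSet holds resources and should always be closed
    results.close();
}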

Example 2 with NodeDAO

Use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.

The class NodeServiceTest, method testLinkToDeletedNodeRecovery.

/**
 * Test for MNT-8494 - we should be able to recover when indexing encounters a node with deleted ancestors
 */
@Test
public void testLinkToDeletedNodeRecovery() throws Throwable {
    // First find any broken links to start with
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);
    List<Long> nodesWithDeletedParents = getChildNodesWithDeletedParentNode(params, 0);
    List<Long> deletedChildren = getDeletedChildren(params, 0);
    List<Long> nodesWithNoParents = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes with deleted parent node (before): " + nodesWithDeletedParents);
    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
    int cnt = 5;
    final List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
    final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
    for (int i = 0; i < cnt; i++) {
        // Create some pseudo-thumbnails
        String randomName = this.getClass().getName() + "-" + System.nanoTime();
        QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
        Map<QName, Serializable> props = new HashMap<QName, Serializable>();
        props.put(ContentModel.PROP_NAME, randomName);
        // Choose a random parent node from the hierarchy
        int random = new Random().nextInt(10);
        NodeRef parentNodeRef = nodeRefs[random];
        NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
        childNodeRefs.add(childNodeRef);
    }
    // Forcefully delete the hierarchy root, an intermediate node, and the last created leaf
    txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

        @Override
        public Void execute() throws Throwable {
            Long nodeId = (Long) nodeService.getProperty(nodeRefs[0], ContentModel.PROP_NODE_DBID);
            nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
            nodeDAO.removeNodeAspects(nodeId);
            nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
            nodeId = (Long) nodeService.getProperty(nodeRefs[2], ContentModel.PROP_NODE_DBID);
            nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
            nodeDAO.removeNodeAspects(nodeId);
            nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
            nodeId = (Long) nodeService.getProperty(childNodeRefs.get(childNodeRefs.size() - 1), ContentModel.PROP_NODE_DBID);
            nodeDAO.updateNode(nodeId, ContentModel.TYPE_DELETED, null);
            nodeDAO.removeNodeAspects(nodeId);
            nodeDAO.removeNodeProperties(nodeId, nodeDAO.getNodeProperties(nodeId).keySet());
            return null;
        }
    });
    // Now need to identify the problem nodes
    final List<Long> childNodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
    assertFalse(childNodeIds.isEmpty());
    logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
    // Now visit the nodes in reverse order and do indexing-like things
    List<NodeRef> allNodeRefs = new ArrayList<NodeRef>(nodeRefs.length + childNodeRefs.size());
    allNodeRefs.addAll(Arrays.asList(nodeRefs));
    allNodeRefs.addAll(childNodeRefs);
    Collections.reverse(allNodeRefs);
    for (final NodeRef nodeRef : allNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    try {
                        for (ChildAssociationRef parentRef : nodeService.getParentAssocs(nodeRef)) {
                            nodeService.getPath(parentRef.getParentRef());
                        }
                        // ignore return
                        nodeService.getPath(nodeRef);
                    } catch (InvalidNodeRefException e) {
                        throw new ConcurrencyFailureException("Deleted node - should be healed on retry", e);
                    }
                }
                return null;
            }
        });
    }
    // Visit the nodes once more; resolving the node pairs might surface any remaining problems
    for (final NodeRef nodeRef : allNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

            @Override
            public Void execute() throws Throwable {
                nodeDAO.getNodePair(nodeRef);
                return null;
            }
        });
    }
    // Check again
    List<Long> nodeIds = getDeletedChildren(params, deletedChildren.size());
    assertTrue("The following deleted nodes still have parents: " + nodeIds, nodeIds.isEmpty());
    nodeIds = getChildNodesWithDeletedParentNode(params, nodesWithDeletedParents.size());
    assertTrue("The following child nodes have deleted parent nodes: " + nodeIds, nodeIds.isEmpty());
    nodeIds = getChildNodesWithNoParentNode(params, nodesWithNoParents.size());
    assertTrue("The following child nodes have no parent node: " + nodeIds, nodeIds.isEmpty());
    // check lost_found ...
    final List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());
    final List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
    txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

        @Override
        public Void execute() throws Throwable {
            for (NodeRef nodeRef : lostAndFoundNodeRefs) {
                Long nodeId = nodeDAO.getNodePair(nodeRef).getFirst();
                lostAndFoundNodeIds.add(nodeId);
            }
            return null;
        }
    });
    for (final Long childNodeId : childNodeIds) {
        Boolean exists = txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Boolean>() {

            @Override
            public Boolean execute() throws Throwable {
                return nodeDAO.exists(childNodeId);
            }
        });
        assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !exists);
    }
}
Also used : Serializable(java.io.Serializable) HashMap(java.util.HashMap) QName(org.alfresco.service.namespace.QName) ArrayList(java.util.ArrayList) ChildAssociationRef(org.alfresco.service.cmr.repository.ChildAssociationRef) NodeDAO(org.alfresco.repo.domain.node.NodeDAO) NodeRef(org.alfresco.service.cmr.repository.NodeRef) Random(java.util.Random) ConcurrencyFailureException(org.springframework.dao.ConcurrencyFailureException) InvalidNodeRefException(org.alfresco.service.cmr.repository.InvalidNodeRefException) NodeEntity(org.alfresco.repo.domain.node.NodeEntity) CannedQueryDAOTest(org.alfresco.repo.domain.query.CannedQueryDAOTest) Test(org.junit.Test)
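
As an aside, RetryingTransactionCallback declares a single execute() method, so on Java 8+ the healing loop can be written with a lambda. A minimal sketch of the same retry pattern, using the same names as the test above:

// Throwing ConcurrencyFailureException makes RetryingTransactionHelper roll back
// and re-run the callback; by the retry, the broken ancestor should have been
// healed (moved under lost_found).
txnService.getRetryingTransactionHelper().doInTransaction(() -> {
    if (nodeService.exists(nodeRef)) {
        try {
            nodeService.getPath(nodeRef);
        } catch (InvalidNodeRefException e) {
            throw new ConcurrencyFailureException("Deleted node - should be healed on retry", e);
        }
    }
    return null;
});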

Example 3 with NodeDAO

Use of org.alfresco.repo.domain.node.NodeDAO in project alfresco-repository by Alfresco.

The class NodeServiceTest, method testForceNonRootNodeWithNoParentNode.

/**
 * Pending repeatable test - force issue ALF-13066 (non-root node with no parent)
 */
@Test
public void testForceNonRootNodeWithNoParentNode() throws Throwable {
    // First find any broken links to start with
    final NodeEntity params = new NodeEntity();
    params.setId(0L);
    params.setTypeQNameId(deletedTypeQNameId);
    List<Long> ids = getChildNodesWithNoParentNode(params, 0);
    logger.debug("Found child nodes with deleted parent node (before): " + ids);
    final int idsToSkip = ids.size();
    final NodeRef[] nodeRefs = new NodeRef[10];
    final NodeRef workspaceRootNodeRef = nodeService.getRootNode(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
    buildNodeHierarchy(workspaceRootNodeRef, nodeRefs);
    int cnt = 5;
    List<NodeRef> childNodeRefs = new ArrayList<NodeRef>(cnt);
    final NodeDAO nodeDAO = (NodeDAO) APP_CONTEXT_INIT.getApplicationContext().getBean("nodeDAO");
    for (int i = 0; i < cnt; i++) {
        // Create some pseudo-thumbnails
        String randomName = this.getClass().getName() + "-" + System.nanoTime();
        QName randomQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, randomName);
        Map<QName, Serializable> props = new HashMap<QName, Serializable>();
        props.put(ContentModel.PROP_NAME, randomName);
        // Choose a random parent node from the hierarchy
        int random = new Random().nextInt(10);
        NodeRef parentNodeRef = nodeRefs[random];
        NodeRef childNodeRef = nodeService.createNode(parentNodeRef, ContentModel.ASSOC_CONTAINS, randomQName, ContentModel.TYPE_THUMBNAIL, props).getChildRef();
        childNodeRefs.add(childNodeRef);
        // forcefully remove the primary parent assoc
        final Long childNodeId = (Long) nodeService.getProperty(childNodeRef, ContentModel.PROP_NODE_DBID);
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

            @Override
            public Void execute() throws Throwable {
                Pair<Long, ChildAssociationRef> assocPair = nodeDAO.getPrimaryParentAssoc(childNodeId);
                nodeDAO.deleteChildAssoc(assocPair.getFirst());
                return null;
            }
        });
    }
    // Now need to identify the problem nodes
    final List<Long> childNodeIds = getChildNodesWithNoParentNode(params, idsToSkip);
    assertFalse(childNodeIds.isEmpty());
    logger.debug("Found child nodes with deleted parent node (after): " + childNodeIds);
    // workaround recovery: force collection of any orphan nodes (ALF-12358 + ALF-13066)
    for (final NodeRef nodeRef : childNodeRefs) {
        txnService.getRetryingTransactionHelper().doInTransaction(new RetryingTransactionCallback<Void>() {

            @Override
            public Void execute() throws Throwable {
                if (nodeService.exists(nodeRef)) {
                    // ignore return
                    nodeService.getPath(nodeRef);
                }
                return null;
            }
        });
    }
    // check again ...
    ids = getChildNodesWithNoParentNode(params, idsToSkip);
    assertTrue("The following child nodes have no parent node: " + ids, ids.isEmpty());
    // check lost_found ...
    List<NodeRef> lostAndFoundNodeRefs = getLostAndFoundNodes();
    assertFalse(lostAndFoundNodeRefs.isEmpty());
    List<Long> lostAndFoundNodeIds = new ArrayList<Long>(lostAndFoundNodeRefs.size());
    for (NodeRef nodeRef : lostAndFoundNodeRefs) {
        lostAndFoundNodeIds.add((Long) nodeService.getProperty(nodeRef, ContentModel.PROP_NODE_DBID));
    }
    for (Long childNodeId : childNodeIds) {
        assertTrue("Not found: " + childNodeId, lostAndFoundNodeIds.contains(childNodeId) || !nodeDAO.exists(childNodeId));
    }
}
Also used : Serializable(java.io.Serializable) HashMap(java.util.HashMap) QName(org.alfresco.service.namespace.QName) ArrayList(java.util.ArrayList) NodeDAO(org.alfresco.repo.domain.node.NodeDAO) NodeRef(org.alfresco.service.cmr.repository.NodeRef) Random(java.util.Random) NodeEntity(org.alfresco.repo.domain.node.NodeEntity) Pair(org.alfresco.util.Pair) CannedQueryDAOTest(org.alfresco.repo.domain.query.CannedQueryDAOTest) Test(org.junit.Test)
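
For completeness, a small sketch of the NodeDAO calls that the final assertions in both tests rely on: getNodePair resolves a NodeRef to a (database id, NodeRef) pair, and exists checks the database id directly, bypassing the NodeService.

// A child passes the assertion if it was either moved under lost_found
// or no longer exists in the node table at all.
Pair<Long, NodeRef> nodePair = nodeDAO.getNodePair(nodeRef);
Long dbId = nodePair.getFirst();
boolean stillPresent = nodeDAO.exists(dbId);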

Aggregations

ArrayList (java.util.ArrayList): 3
HashMap (java.util.HashMap): 3
NodeDAO (org.alfresco.repo.domain.node.NodeDAO): 3
Serializable (java.io.Serializable): 2
Random (java.util.Random): 2
NodeEntity (org.alfresco.repo.domain.node.NodeEntity): 2
CannedQueryDAOTest (org.alfresco.repo.domain.query.CannedQueryDAOTest): 2
NodeRef (org.alfresco.service.cmr.repository.NodeRef): 2
QName (org.alfresco.service.namespace.QName): 2
Pair (org.alfresco.util.Pair): 2
Test (org.junit.Test): 2
BufferedReader (java.io.BufferedReader): 1
IOException (java.io.IOException): 1
InputStreamReader (java.io.InputStreamReader): 1
Reader (java.io.Reader): 1
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 1
Iterator (java.util.Iterator): 1
List (java.util.List): 1
Locale (java.util.Locale): 1
Map (java.util.Map): 1