
Example 1 with NodeMetaDataParameters

Use of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco.

From class SolrInformationServer, method cascadeUpdateV2.

private void cascadeUpdateV2(NodeMetaData parentNodeMetaData, boolean overwrite, SolrQueryRequest request, UpdateRequestProcessor processor) throws AuthenticationException, IOException, JSONException {
    // System.out.println("################ Cascade update V2 !");
    RefCounted<SolrIndexSearcher> refCounted = null;
    IntArrayList docList = null;
    HashSet<Long> childIds = new HashSet<Long>();
    try {
        refCounted = core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        TermQuery termQuery = new TermQuery(new Term(FIELD_ANCESTOR, parentNodeMetaData.getNodeRef().toString()));
        BooleanClause booleanClause = new BooleanClause(termQuery, BooleanClause.Occur.MUST);
        builder.add(booleanClause);
        BooleanQuery booleanQuery = builder.build();
        // System.out.println("################ ANCESTOR QUERY:"+booleanQuery.toString());
        DocListCollector collector = new DocListCollector();
        searcher.search(booleanQuery, collector);
        docList = collector.getDocs();
        int size = docList.size();
        Set<String> set = new HashSet<>();
        set.add(FIELD_SOLR4_ID);
        for (int i = 0; i < size; i++) {
            int docId = docList.get(i);
            Document document = searcher.doc(docId, set);
            IndexableField indexableField = document.getField(FIELD_SOLR4_ID);
            String id = indexableField.stringValue();
            TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
            childIds.add(ids.dbId);
        }
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(true);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        if (!nodeMetaDatas.isEmpty()) {
            NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
            // Do not bring in changes from the future: nodes may switch shards, and we do not want that logic here.
            if (nodeMetaData.getTxnId() < parentNodeMetaData.getTxnId()) {
                long nodeId = nodeMetaData.getId();
                try {
                    if (!spinLock(nodeId, 120000)) {
                        throw new IOException("Unable to acquire spinlock on:" + nodeId);
                    }
                    // System.out.println("################ Starting CASCADE UPDATE:"+nodeMetaData.getId());
                    if (log.isDebugEnabled()) {
                        log.debug("... cascade update child doc " + childId);
                    }
                    // Gets the document that we have from the content store and updates it
                    String fixedTenantDomain = AlfrescoSolrDataModel.getTenantId(nodeMetaData.getTenantDomain());
                    SolrInputDocument cachedDoc = solrContentStore.retrieveDocFromSolrContentStore(fixedTenantDomain, nodeMetaData.getId());
                    if (cachedDoc == null) {
                        cachedDoc = recreateSolrDoc(nodeMetaData.getId(), fixedTenantDomain);
                        // This is a work around for ACE-3228/ACE-3258 and the way stores are expunged when deleting a tenant
                        if (cachedDoc == null) {
                            deleteNode(processor, request, nodeMetaData.getId());
                        }
                    }
                    if (cachedDoc != null) {
                        updatePathRelatedFields(nodeMetaData, cachedDoc);
                        updateNamePathRelatedFields(nodeMetaData, cachedDoc);
                        updateAncestorRelatedFields(nodeMetaData, cachedDoc);
                        AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                        addDocCmd.overwrite = overwrite;
                        addDocCmd.solrDoc = cachedDoc;
                        // System.out.println("######## Final Cascade Doc :"+cachedDoc);
                        processor.processAdd(addDocCmd);
                        solrContentStore.storeDocOnSolrContentStore(fixedTenantDomain, nodeMetaData.getId(), cachedDoc);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("... no child doc found to update " + childId);
                        }
                    }
                } finally {
                    unlock(nodeId);
                }
            }
        }
    }
}
Also used : BooleanQuery(org.apache.lucene.search.BooleanQuery) Set(java.util.Set) AclChangeSet(org.alfresco.solr.client.AclChangeSet) LinkedHashSet(java.util.LinkedHashSet) IOpenBitSet(org.alfresco.solr.adapters.IOpenBitSet) HashSet(java.util.HashSet) TenantAclIdDbId(org.alfresco.solr.AlfrescoSolrDataModel.TenantAclIdDbId) BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) Document(org.apache.lucene.document.Document) SolrInputDocument(org.apache.solr.common.SolrInputDocument) SolrDocument(org.apache.solr.common.SolrDocument) SolrInputDocument(org.apache.solr.common.SolrInputDocument) NodeMetaDataParameters(org.alfresco.solr.client.NodeMetaDataParameters) LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet) TermQuery(org.apache.lucene.search.TermQuery) NodeMetaData(org.alfresco.solr.client.NodeMetaData) SolrIndexSearcher(org.apache.solr.search.SolrIndexSearcher) Term(org.apache.lucene.index.Term) IOException(java.io.IOException) BooleanClause(org.apache.lucene.search.BooleanClause) IndexableField(org.apache.lucene.index.IndexableField) IntArrayList(com.carrotsearch.hppc.IntArrayList) AddUpdateCommand(org.apache.solr.update.AddUpdateCommand)
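
The per-child lookup above repeats a fixed NodeMetaDataParameters configuration. Below is a minimal sketch of that pattern pulled out of the method for illustration, assuming repositoryClient is the class's SOLRAPIClient; the helper name fetchPathOnlyMetaData is illustrative only and not part of the original class.

import java.util.List;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.NodeMetaDataParameters;
import org.alfresco.solr.client.SOLRAPIClient;

// Hypothetical helper: fetch path/ancestor metadata for a single node id.
private NodeMetaData fetchPathOnlyMetaData(SOLRAPIClient repositoryClient, long nodeId) throws Exception {
    NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
    // Restrict the request to exactly one node.
    nmdp.setFromNodeId(nodeId);
    nmdp.setToNodeId(nodeId);
    // Ask only for paths (ancestors come with them), the txn id and child ids; switch everything else off.
    nmdp.setIncludePaths(true);
    nmdp.setIncludeTxnId(true);
    nmdp.setIncludeChildIds(true);
    nmdp.setIncludeAclId(false);
    nmdp.setIncludeAspects(false);
    nmdp.setIncludeChildAssociations(false);
    nmdp.setIncludeNodeRef(false);
    nmdp.setIncludeOwner(false);
    nmdp.setIncludeParentAssociations(false);
    nmdp.setIncludeProperties(false);
    nmdp.setIncludeType(false);
    // maxResults = 1: the from/to range covers a single node id.
    List<NodeMetaData> result = repositoryClient.getNodesMetaData(nmdp, 1);
    return result.isEmpty() ? null : result.get(0);
}

The same configuration (with minor differences in the txn id flag) recurs in Examples 2 and 4 below.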

Example 2 with NodeMetaDataParameters

Use of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco.

From class SolrInformationServer, method doUpdateDescendantDocs.

private void doUpdateDescendantDocs(NodeMetaData parentNodeMetaData, boolean overwrite, SolrQueryRequest request, UpdateRequestProcessor processor, LinkedHashSet<Long> stack) throws AuthenticationException, IOException, JSONException {
    if ((skipDescendantDocsForSpecificTypes && typesForSkippingDescendantDocs.contains(parentNodeMetaData.getType())) || (skipDescendantDocsForSpecificAspects && shouldBeIgnoredByAnyAspect(parentNodeMetaData.getAspects()))) {
        return;
    }
    HashSet<Long> childIds = new HashSet<Long>();
    if (parentNodeMetaData.getChildIds() != null) {
        childIds.addAll(parentNodeMetaData.getChildIds());
    }
    String query = FIELD_PARENT + ":\"" + parentNodeMetaData.getNodeRef() + "\"";
    ModifiableSolrParams params = new ModifiableSolrParams(request.getParams());
    params.set("q", query).set("fl", FIELD_SOLR4_ID);
    if (skippingDocsQueryString != null && !skippingDocsQueryString.isEmpty()) {
        params.set("fq", "NOT ( " + skippingDocsQueryString + " )");
    }
    SolrDocumentList docs = cloud.getSolrDocumentList(nativeRequestHandler, request, params);
    for (SolrDocument doc : docs) {
        String id = getFieldValueString(doc, FIELD_SOLR4_ID);
        TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
        childIds.add(ids.dbId);
    }
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(false);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        if (!nodeMetaDatas.isEmpty()) {
            NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
            if (mayHaveChildren(nodeMetaData)) {
                updateDescendantDocs(nodeMetaData, overwrite, request, processor, stack);
            }
            try {
                if (!spinLock(childId, 120000)) {
                    // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                    throw new IOException("Unable to acquire lock on nodeId:" + childId);
                }
                if (log.isDebugEnabled()) {
                    log.debug("... cascade update child doc " + childId);
                }
                // Gets the document that we have from the content store and updates it
                String fixedTenantDomain = AlfrescoSolrDataModel.getTenantId(nodeMetaData.getTenantDomain());
                SolrInputDocument cachedDoc = solrContentStore.retrieveDocFromSolrContentStore(fixedTenantDomain, nodeMetaData.getId());
                if (cachedDoc != null) {
                    updatePathRelatedFields(nodeMetaData, cachedDoc);
                    updateNamePathRelatedFields(nodeMetaData, cachedDoc);
                    updateAncestorRelatedFields(nodeMetaData, cachedDoc);
                    AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                    addDocCmd.overwrite = overwrite;
                    addDocCmd.solrDoc = cachedDoc;
                    processor.processAdd(addDocCmd);
                    solrContentStore.storeDocOnSolrContentStore(fixedTenantDomain, nodeMetaData.getId(), cachedDoc);
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("... no child doc found to update " + childId);
                    }
                }
            } finally {
                unlock(childId);
            }
        }
    }
}
Also used : NodeMetaData(org.alfresco.solr.client.NodeMetaData) TenantAclIdDbId(org.alfresco.solr.AlfrescoSolrDataModel.TenantAclIdDbId) SolrDocumentList(org.apache.solr.common.SolrDocumentList) IOException(java.io.IOException) ModifiableSolrParams(org.apache.solr.common.params.ModifiableSolrParams) SolrInputDocument(org.apache.solr.common.SolrInputDocument) SolrDocument(org.apache.solr.common.SolrDocument) NodeMetaDataParameters(org.alfresco.solr.client.NodeMetaDataParameters) AddUpdateCommand(org.apache.solr.update.AddUpdateCommand) LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet)
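
In this example the child documents are found through a Solr query rather than an ancestor term query. A stripped-down sketch of that parameter construction follows; the field names are passed in as parameters because the constants FIELD_PARENT and FIELD_SOLR4_ID belong to the surrounding class, and the helper name is illustrative only.

import org.apache.solr.common.params.ModifiableSolrParams;

// Hypothetical helper: build the params used to find direct children of a parent node.
static ModifiableSolrParams buildChildLookupParams(String parentNodeRef,
                                                   String fieldParent,       // e.g. FIELD_PARENT
                                                   String fieldSolr4Id,      // e.g. FIELD_SOLR4_ID
                                                   String skippingDocsQueryString) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    // Match documents whose PARENT field equals the given NodeRef.
    params.set("q", fieldParent + ":\"" + parentNodeRef + "\"");
    // Only the encoded document id is needed to recover each child dbId.
    params.set("fl", fieldSolr4Id);
    // Optionally exclude documents that the tracker is configured to skip.
    if (skippingDocsQueryString != null && !skippingDocsQueryString.isEmpty()) {
        params.set("fq", "NOT ( " + skippingDocsQueryString + " )");
    }
    return params;
}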

Example 3 with NodeMetaDataParameters

Use of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco.

From class SolrInformationServer, method indexNodes.

@Override
public void indexNodes(List<Node> nodes, boolean overwrite, boolean cascade) throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());
        Map<Long, Node> nodeIdsToNodes = new HashMap<>();
        EnumMap<SolrApiNodeStatus, List<Long>> nodeStatusToNodeIds = new EnumMap<SolrApiNodeStatus, List<Long>>(SolrApiNodeStatus.class);
        categorizeNodes(nodes, nodeIdsToNodes, nodeStatusToNodeIds);
        List<Long> deletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.DELETED));
        List<Long> shardDeletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
        List<Long> shardUpdatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
        List<Long> unknownNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UNKNOWN));
        List<Long> updatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UPDATED));
        if (!deletedNodeIds.isEmpty() || !shardDeletedNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty()) {
            // fix up any secondary paths
            List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
            // For all deleted nodes, fake the node metadata
            for (Long deletedNodeId : deletedNodeIds) {
                Node node = nodeIdsToNodes.get(deletedNodeId);
                NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
                nodeMetaDatas.add(nodeMetaData);
            }
            if (!unknownNodeIds.isEmpty()) {
                NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
                nmdp.setNodeIds(unknownNodeIds);
                nodeMetaDatas.addAll(repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE));
            }
            for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                if (nodeMetaData.getTxnId() > node.getTxnId()) {
                    // it will be indexed later
                    continue;
                }
                if (nodeMetaData != null) {
                    try {
                        // Lock the node to ensure that no other trackers work with this node until this code completes.
                        if (!spinLock(nodeMetaData.getId(), 120000)) {
                            // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                            throw new Exception("Unable to acquire lock on nodeId:" + nodeMetaData.getId());
                        }
                        solrContentStore.removeDocFromContentStore(nodeMetaData);
                    } finally {
                        unlock(nodeMetaData.getId());
                    }
                }
            }
            if (log.isDebugEnabled()) {
                log.debug(".. deleting");
            }
            DeleteUpdateCommand delDocCmd = new DeleteUpdateCommand(request);
            String query = this.cloud.getQuery(FIELD_DBID, OR, deletedNodeIds, shardDeletedNodeIds, shardUpdatedNodeIds, unknownNodeIds);
            delDocCmd.setQuery(query);
            processor.processDelete(delDocCmd);
        }
        if (!updatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty()) {
            log.info(".. updating");
            NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
            List<Long> nodeIds = new LinkedList<>();
            nodeIds.addAll(updatedNodeIds);
            nodeIds.addAll(unknownNodeIds);
            nodeIds.addAll(shardUpdatedNodeIds);
            nmdp.setNodeIds(nodeIds);
            // Fetches bulk metadata
            List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
            NEXT_NODE: for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                // System.out.println("####################### NodeMetaData:"+ nodeMetaData.getId());
                long start = System.nanoTime();
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                long nodeId = node.getId();
                try {
                    // Lock the node to ensure that no other trackers work with this node until this code completes.
                    if (!spinLock(nodeId, 120000)) {
                        // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                        throw new Exception("Unable to acquire lock on nodeId:" + nodeId);
                    }
                    if (nodeMetaData.getTxnId() > node.getTxnId()) {
                        // it will be indexed later
                        continue;
                    }
                    if (nodeIdsToNodes.get(nodeMetaData.getId()).getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED) {
                        if (nodeMetaData.getProperties().get(ContentModel.PROP_CASCADE_TX) != null) {
                            indexNonShardCascade(nodeMetaData);
                        }
                        continue;
                    }
                    AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                    addDocCmd.overwrite = overwrite;
                    // check index control
                    Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
                    StringPropertyValue pValue = (StringPropertyValue) properties.get(ContentModel.PROP_IS_INDEXED);
                    if (pValue != null) {
                        Boolean isIndexed = Boolean.valueOf(pValue.getValue());
                        if (!isIndexed.booleanValue()) {
                            if (log.isDebugEnabled()) {
                                log.debug(".. clearing unindexed");
                            }
                            deleteNode(processor, request, node);
                            SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_UNINDEXED_NODE);
                            addDocCmd.solrDoc = doc;
                            if (recordUnindexedNodes) {
                                solrContentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
                                processor.processAdd(addDocCmd);
                            }
                            long end = System.nanoTime();
                            this.trackerStats.addNodeTime(end - start);
                            continue NEXT_NODE;
                        }
                    }
                    // Make sure any unindexed or error doc is removed.
                    if (log.isDebugEnabled()) {
                        log.debug(".. deleting node " + node.getId());
                    }
                    deleteNode(processor, request, node);
                    SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
                    addToNewDocAndCache(nodeMetaData, doc);
                    addDocCmd.solrDoc = doc;
                    // System.out.println("###################### indexing doc:"+doc.toString());
                    processor.processAdd(addDocCmd);
                    long end = System.nanoTime();
                    this.trackerStats.addNodeTime(end - start);
                } finally {
                    // release the lock on the node so other trackers can access the node.
                    unlock(nodeId);
                }
            }
        // Ends iteration over nodeMetadatas
        }
    // Ends checking for the existence of updated or unknown node ids
    } catch (Exception e) {
        log.error("SolrInformationServer problem", e);
        // Bulk version failed, so do one at a time.
        for (Node node : nodes) {
            this.indexNode(node, true);
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}
Also used : StringPropertyValue(org.alfresco.solr.client.StringPropertyValue) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) Node(org.alfresco.solr.client.Node) IntArrayList(com.carrotsearch.hppc.IntArrayList) ArrayList(java.util.ArrayList) SolrInputDocument(org.apache.solr.common.SolrInputDocument) SolrApiNodeStatus(org.alfresco.solr.client.Node.SolrApiNodeStatus) UpdateRequestProcessor(org.apache.solr.update.processor.UpdateRequestProcessor) NodeMetaDataParameters(org.alfresco.solr.client.NodeMetaDataParameters) SolrDocumentList(org.apache.solr.common.SolrDocumentList) IntArrayList(com.carrotsearch.hppc.IntArrayList) ArrayList(java.util.ArrayList) NamedList(org.apache.solr.common.util.NamedList) DocList(org.apache.solr.search.DocList) List(java.util.List) LinkedList(java.util.LinkedList) EnumMap(java.util.EnumMap) SolrQueryResponse(org.apache.solr.response.SolrQueryResponse) NodeMetaData(org.alfresco.solr.client.NodeMetaData) JSONException(org.json.JSONException) AuthenticationException(org.alfresco.httpclient.AuthenticationException) IOException(java.io.IOException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) LinkedList(java.util.LinkedList) LocalSolrQueryRequest(org.apache.solr.request.LocalSolrQueryRequest) SolrQueryRequest(org.apache.solr.request.SolrQueryRequest) DeleteUpdateCommand(org.apache.solr.update.DeleteUpdateCommand) AddUpdateCommand(org.apache.solr.update.AddUpdateCommand) Map(java.util.Map) SolrSimpleOrderedMap(org.alfresco.solr.adapters.SolrSimpleOrderedMap) EnumMap(java.util.EnumMap) LinkedHashMap(java.util.LinkedHashMap) SimpleOrderedMap(org.apache.solr.common.util.SimpleOrderedMap) HashMap(java.util.HashMap) ISimpleOrderedMap(org.alfresco.solr.adapters.ISimpleOrderedMap)
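
Unlike the per-child lookups in the first two examples, indexNodes fetches metadata in bulk by handing NodeMetaDataParameters a list of node ids. A minimal sketch of that bulk pattern, again assuming repositoryClient is a SOLRAPIClient; the helper name is illustrative only.

import java.util.List;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.NodeMetaDataParameters;
import org.alfresco.solr.client.SOLRAPIClient;

// Hypothetical helper: fetch metadata for a batch of node ids in one call.
private List<NodeMetaData> fetchBulkMetaData(SOLRAPIClient repositoryClient, List<Long> nodeIds) throws Exception {
    NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
    // A list of ids replaces the from/to range used for single-node lookups.
    nmdp.setNodeIds(nodeIds);
    // Integer.MAX_VALUE: do not cap the number of metadata records returned.
    return repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
}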

Example 4 with NodeMetaDataParameters

Use of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco.

From class SolrInformationServer, method getCascadeNodes.

public List<NodeMetaData> getCascadeNodes(List<Long> txnIds) throws AuthenticationException, IOException, JSONException {
    List<FieldInstance> list = AlfrescoSolrDataModel.getInstance().getIndexedFieldNamesForProperty(ContentModel.PROP_CASCADE_TX).getFields();
    FieldInstance fieldInstance = list.get(0);
    RefCounted<SolrIndexSearcher> refCounted = null;
    IntArrayList docList = null;
    HashSet<Long> childIds = new HashSet<>();
    try {
        refCounted = core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();
        String field = fieldInstance.getField();
        SchemaField schemaField = searcher.getSchema().getField(field);
        FieldType fieldType = schemaField.getType();
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        BooleanQuery booleanQuery = null;
        for (Long l : txnIds) {
            BytesRefBuilder bytesRefBuilder = new BytesRefBuilder();
            fieldType.readableToIndexed(l.toString(), bytesRefBuilder);
            TermQuery termQuery = new TermQuery(new Term(field, bytesRefBuilder.toBytesRef()));
            BooleanClause booleanClause = new BooleanClause(termQuery, BooleanClause.Occur.SHOULD);
            builder.add(booleanClause);
        }
        booleanQuery = builder.build();
        DocListCollector collector = new DocListCollector();
        searcher.search(booleanQuery, collector);
        docList = collector.getDocs();
        // System.out.println("################ CASCASDE Parent Nodes:"+docList.size());
        int size = docList.size();
        Set<String> set = new HashSet<>();
        set.add(FIELD_SOLR4_ID);
        for (int i = 0; i < size; i++) {
            int docId = docList.get(i);
            Document document = searcher.doc(docId, set);
            IndexableField indexableField = document.getField(FIELD_SOLR4_ID);
            String id = indexableField.stringValue();
            TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
            // System.out.println("################## Cascade Parent:"+ ids.dbId);
            childIds.add(ids.dbId);
        }
    } finally {
        if (refCounted != null) {
            refCounted.decref();
        }
    }
    List<NodeMetaData> allNodeMetaDatas = new ArrayList<>();
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(true);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        allNodeMetaDatas.addAll(nodeMetaDatas);
    }
    return allNodeMetaDatas;
}
Also used : BooleanQuery(org.apache.lucene.search.BooleanQuery) Set(java.util.Set) AclChangeSet(org.alfresco.solr.client.AclChangeSet) LinkedHashSet(java.util.LinkedHashSet) IOpenBitSet(org.alfresco.solr.adapters.IOpenBitSet) HashSet(java.util.HashSet) TenantAclIdDbId(org.alfresco.solr.AlfrescoSolrDataModel.TenantAclIdDbId) BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) IntArrayList(com.carrotsearch.hppc.IntArrayList) ArrayList(java.util.ArrayList) Document(org.apache.lucene.document.Document) SolrInputDocument(org.apache.solr.common.SolrInputDocument) SolrDocument(org.apache.solr.common.SolrDocument) NodeMetaDataParameters(org.alfresco.solr.client.NodeMetaDataParameters) FieldInstance(org.alfresco.solr.AlfrescoSolrDataModel.FieldInstance) LinkedHashSet(java.util.LinkedHashSet) HashSet(java.util.HashSet) TermQuery(org.apache.lucene.search.TermQuery) BytesRefBuilder(org.apache.lucene.util.BytesRefBuilder) NodeMetaData(org.alfresco.solr.client.NodeMetaData) SolrIndexSearcher(org.apache.solr.search.SolrIndexSearcher) Term(org.apache.lucene.index.Term) FieldType(org.apache.solr.schema.FieldType) SchemaField(org.apache.solr.schema.SchemaField) BooleanClause(org.apache.lucene.search.BooleanClause) IndexableField(org.apache.lucene.index.IndexableField) IntArrayList(com.carrotsearch.hppc.IntArrayList)
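
The interesting step in getCascadeNodes is turning a set of transaction ids into a single Lucene query: each id is converted to its indexed form and added as a SHOULD clause. A reduced sketch of that step follows, assuming the field name and its FieldType have already been resolved from the schema as in the method above; the helper name is illustrative only.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.solr.schema.FieldType;

// Hypothetical helper: build an OR (SHOULD) query matching any of the given txn ids.
static BooleanQuery buildCascadeTxnQuery(String field, FieldType fieldType, Iterable<Long> txnIds) {
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    for (Long txnId : txnIds) {
        // Convert the external (readable) value to the indexed byte representation.
        BytesRefBuilder bytes = new BytesRefBuilder();
        fieldType.readableToIndexed(txnId.toString(), bytes);
        // SHOULD: a document matches if it carries any one of the transaction ids.
        TermQuery termQuery = new TermQuery(new Term(field, bytes.toBytesRef()));
        builder.add(new BooleanClause(termQuery, BooleanClause.Occur.SHOULD));
    }
    return builder.build();
}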

Example 5 with NodeMetaDataParameters

Use of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco.

From class SolrInformationServer, method recreateSolrDoc.

private SolrInputDocument recreateSolrDoc(long dbId, String tenant) throws AuthenticationException, IOException, JSONException {
    NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
    nmdp.setFromNodeId(dbId);
    nmdp.setToNodeId(dbId);
    List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
    SolrInputDocument newDoc = null;
    if (!nodeMetaDatas.isEmpty()) {
        NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
        newDoc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
        addFieldsToDoc(nodeMetaData, newDoc);
        boolean isContentIndexedForNode = isContentIndexedForNode(nodeMetaData.getProperties());
        Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
        // Cached doc is null for this method because it is a recreate after a cache miss.
        addPropertiesToDoc(properties, isContentIndexedForNode, newDoc, null, transformContent);
    } else {
        // we get an empty list if a node is deleted
        if (log.isDebugEnabled()) {
            log.debug("Failed to recreate Solr doc with tenant [" + tenant + "] and dbId [" + dbId + "], " + "because node not found in repository.");
        }
    }
    return newDoc;
}
Also used : NodeMetaData(org.alfresco.solr.client.NodeMetaData) SolrInputDocument(org.apache.solr.common.SolrInputDocument) QName(org.alfresco.service.namespace.QName) NodeMetaDataParameters(org.alfresco.solr.client.NodeMetaDataParameters) StringPropertyValue(org.alfresco.solr.client.StringPropertyValue) MLTextPropertyValue(org.alfresco.solr.client.MLTextPropertyValue) ContentPropertyValue(org.alfresco.solr.client.ContentPropertyValue) MultiPropertyValue(org.alfresco.solr.client.MultiPropertyValue) PropertyValue(org.alfresco.solr.client.PropertyValue)
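
recreateSolrDoc shows the simplest use of NodeMetaDataParameters: only the node id range is set and the include flags are left at their defaults, which in this example is enough to rebuild the document's fields and properties. A minimal sketch, assuming repositoryClient is a SOLRAPIClient; the helper name is illustrative only.

import java.util.List;
import org.alfresco.solr.client.NodeMetaData;
import org.alfresco.solr.client.NodeMetaDataParameters;
import org.alfresco.solr.client.SOLRAPIClient;

// Hypothetical helper: fetch metadata for a single node with default include flags,
// returning null if the repository no longer has the node (e.g. it was deleted).
private NodeMetaData fetchFullMetaData(SOLRAPIClient repositoryClient, long dbId) throws Exception {
    NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
    nmdp.setFromNodeId(dbId);
    nmdp.setToNodeId(dbId);
    List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
    // An empty list means the node was deleted in the repository.
    return nodeMetaDatas.isEmpty() ? null : nodeMetaDatas.get(0);
}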

Aggregations

NodeMetaData (org.alfresco.solr.client.NodeMetaData) 6
NodeMetaDataParameters (org.alfresco.solr.client.NodeMetaDataParameters) 6
SolrInputDocument (org.apache.solr.common.SolrInputDocument) 6
IOException (java.io.IOException) 4
AddUpdateCommand (org.apache.solr.update.AddUpdateCommand) 4
IntArrayList (com.carrotsearch.hppc.IntArrayList) 3
HashSet (java.util.HashSet) 3
LinkedHashSet (java.util.LinkedHashSet) 3
TenantAclIdDbId (org.alfresco.solr.AlfrescoSolrDataModel.TenantAclIdDbId) 3
StringPropertyValue (org.alfresco.solr.client.StringPropertyValue) 3
SolrDocument (org.apache.solr.common.SolrDocument) 3
UnsupportedEncodingException (java.io.UnsupportedEncodingException) 2
ArrayList (java.util.ArrayList) 2
Set (java.util.Set) 2
AuthenticationException (org.alfresco.httpclient.AuthenticationException) 2
QName (org.alfresco.service.namespace.QName) 2
IOpenBitSet (org.alfresco.solr.adapters.IOpenBitSet) 2
AclChangeSet (org.alfresco.solr.client.AclChangeSet) 2
ContentPropertyValue (org.alfresco.solr.client.ContentPropertyValue) 2
MLTextPropertyValue (org.alfresco.solr.client.MLTextPropertyValue) 2