Example usage of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco:
the SolrInformationServer class, method cascadeUpdateV2.
/**
 * Cascades a path change on the given parent node down to every indexed descendant document.
 * Descendants are located through the ANCESTOR field in the local index; each one is then
 * re-fetched from the repository (paths/ancestors only), its cached document patched and
 * re-submitted to the update chain.
 *
 * @param parentNodeMetaData metadata of the parent whose descendants must be refreshed
 * @param overwrite          whether the add command should overwrite existing documents
 * @param request            the Solr request providing the update context
 * @param processor          the update chain that receives the add/delete commands
 * @throws AuthenticationException if the repository call cannot be authenticated
 * @throws IOException             if the node lock cannot be acquired or index access fails
 * @throws JSONException           if the repository response cannot be parsed
 */
private void cascadeUpdateV2(NodeMetaData parentNodeMetaData, boolean overwrite, SolrQueryRequest request, UpdateRequestProcessor processor) throws AuthenticationException, IOException, JSONException {
    RefCounted<SolrIndexSearcher> refCounted = null;
    Set<Long> childIds = new HashSet<>();
    try {
        refCounted = core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();
        // Match every document that lists the parent among its ancestors.
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        TermQuery termQuery = new TermQuery(new Term(FIELD_ANCESTOR, parentNodeMetaData.getNodeRef().toString()));
        builder.add(new BooleanClause(termQuery, BooleanClause.Occur.MUST));
        BooleanQuery booleanQuery = builder.build();
        DocListCollector collector = new DocListCollector();
        searcher.search(booleanQuery, collector);
        IntArrayList docList = collector.getDocs();
        int size = docList.size();
        // Only the id field is needed to resolve the descendant DBIDs.
        Set<String> fields = new HashSet<>();
        fields.add(FIELD_SOLR4_ID);
        for (int i = 0; i < size; i++) {
            int docId = docList.get(i);
            Document document = searcher.doc(docId, fields);
            String id = document.getField(FIELD_SOLR4_ID).stringValue();
            TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
            childIds.add(ids.dbId);
        }
    } finally {
        // Null-guard: getSearcher() may have thrown before refCounted was assigned;
        // an unconditional decref() would NPE here and mask the original failure.
        if (refCounted != null) {
            refCounted.decref();
        }
    }
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        // The range [childId, childId] selects exactly one node.
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(true);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        if (!nodeMetaDatas.isEmpty()) {
            NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
            // We do not bring in changes from the future as nodes may switch shards and we do not want the logic here.
            if (nodeMetaData.getTxnId() < parentNodeMetaData.getTxnId()) {
                long nodeId = nodeMetaData.getId();
                try {
                    if (!spinLock(nodeId, 120000)) {
                        // Two minutes without the lock indicates something has gone badly wrong.
                        throw new IOException("Unable to acquire spinlock on:" + nodeId);
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("... cascade update child doc " + childId);
                    }
                    // Gets the document that we have from the content store and updates it
                    String fixedTenantDomain = AlfrescoSolrDataModel.getTenantId(nodeMetaData.getTenantDomain());
                    SolrInputDocument cachedDoc = solrContentStore.retrieveDocFromSolrContentStore(fixedTenantDomain, nodeMetaData.getId());
                    if (cachedDoc == null) {
                        cachedDoc = recreateSolrDoc(nodeMetaData.getId(), fixedTenantDomain);
                        // This is a work around for ACE-3228/ACE-3258 and the way stores are expunged when deleting a tenant
                        if (cachedDoc == null) {
                            deleteNode(processor, request, nodeMetaData.getId());
                        }
                    }
                    if (cachedDoc != null) {
                        updatePathRelatedFields(nodeMetaData, cachedDoc);
                        updateNamePathRelatedFields(nodeMetaData, cachedDoc);
                        updateAncestorRelatedFields(nodeMetaData, cachedDoc);
                        AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                        addDocCmd.overwrite = overwrite;
                        addDocCmd.solrDoc = cachedDoc;
                        processor.processAdd(addDocCmd);
                        // Keep the content store copy in sync with what was just indexed.
                        solrContentStore.storeDocOnSolrContentStore(fixedTenantDomain, nodeMetaData.getId(), cachedDoc);
                    } else {
                        if (log.isDebugEnabled()) {
                            log.debug("... no child doc found to update " + childId);
                        }
                    }
                } finally {
                    unlock(nodeId);
                }
            }
        }
    }
}
Example usage of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco:
the SolrInformationServer class, method doUpdateDescendantDocs.
/**
 * Recursively refreshes the path/ancestor-related fields of the descendant documents of the
 * given parent node. Children are gathered from the parent's own child-id list plus an index
 * query on the PARENT field, then each child is re-fetched from the repository (paths only),
 * recursed into first (depth-first, via updateDescendantDocs and the visited-node stack),
 * and finally updated in place under a per-node spin lock.
 *
 * NOTE(review): recursion happens BEFORE the child's lock is taken, presumably to avoid
 * holding a lock across the whole subtree — confirm against updateDescendantDocs.
 *
 * @param parentNodeMetaData metadata of the node whose descendants are refreshed
 * @param overwrite          whether add commands should overwrite existing documents
 * @param request            the Solr request providing the update context
 * @param processor          the update chain that receives the add commands
 * @param stack              nodes already visited in this cascade, used to bound the recursion
 * @throws AuthenticationException if a repository call cannot be authenticated
 * @throws IOException             if a node lock cannot be acquired or index access fails
 * @throws JSONException           if a repository response cannot be parsed
 */
private void doUpdateDescendantDocs(NodeMetaData parentNodeMetaData, boolean overwrite, SolrQueryRequest request, UpdateRequestProcessor processor, LinkedHashSet<Long> stack) throws AuthenticationException, IOException, JSONException {
    // Skip whole subtrees rooted at configured types/aspects (e.g. hidden or system nodes).
    if ((skipDescendantDocsForSpecificTypes && typesForSkippingDescendantDocs.contains(parentNodeMetaData.getType())) || (skipDescendantDocsForSpecificAspects && shouldBeIgnoredByAnyAspect(parentNodeMetaData.getAspects()))) {
        return;
    }
    HashSet<Long> childIds = new HashSet<Long>();
    // Start with the children reported by the repository metadata, if any.
    if (parentNodeMetaData.getChildIds() != null) {
        childIds.addAll(parentNodeMetaData.getChildIds());
    }
    // Also query the index for documents whose PARENT field points at this node.
    String query = FIELD_PARENT + ":\"" + parentNodeMetaData.getNodeRef() + "\"";
    ModifiableSolrParams params = new ModifiableSolrParams(request.getParams());
    params.set("q", query).set("fl", FIELD_SOLR4_ID);
    // Exclude documents matching the configured skip query, when one is set.
    if (skippingDocsQueryString != null && !skippingDocsQueryString.isEmpty()) {
        params.set("fq", "NOT ( " + skippingDocsQueryString + " )");
    }
    SolrDocumentList docs = cloud.getSolrDocumentList(nativeRequestHandler, request, params);
    for (SolrDocument doc : docs) {
        String id = getFieldValueString(doc, FIELD_SOLR4_ID);
        TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
        childIds.add(ids.dbId);
    }
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        // The range [childId, childId] selects exactly one node.
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(false);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        if (!nodeMetaDatas.isEmpty()) {
            NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
            // Depth-first: descend into grandchildren before updating this child.
            if (mayHaveChildren(nodeMetaData)) {
                updateDescendantDocs(nodeMetaData, overwrite, request, processor, stack);
            }
            try {
                if (!spinLock(childId, 120000)) {
                    // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                    throw new IOException("Unable to acquire lock on nodeId:" + childId);
                }
                if (log.isDebugEnabled()) {
                    log.debug("... cascade update child doc " + childId);
                }
                // Gets the document that we have from the content store and updates it
                String fixedTenantDomain = AlfrescoSolrDataModel.getTenantId(nodeMetaData.getTenantDomain());
                SolrInputDocument cachedDoc = solrContentStore.retrieveDocFromSolrContentStore(fixedTenantDomain, nodeMetaData.getId());
                if (cachedDoc != null) {
                    // Patch only the path-derived fields, then re-index and re-cache the doc.
                    updatePathRelatedFields(nodeMetaData, cachedDoc);
                    updateNamePathRelatedFields(nodeMetaData, cachedDoc);
                    updateAncestorRelatedFields(nodeMetaData, cachedDoc);
                    AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                    addDocCmd.overwrite = overwrite;
                    addDocCmd.solrDoc = cachedDoc;
                    processor.processAdd(addDocCmd);
                    solrContentStore.storeDocOnSolrContentStore(fixedTenantDomain, nodeMetaData.getId(), cachedDoc);
                } else {
                    // Unlike cascadeUpdateV2, a cache miss here is not recreated — just logged.
                    if (log.isDebugEnabled()) {
                        log.debug("... no child doc found to update " + childId);
                    }
                }
            } finally {
                unlock(childId);
            }
        }
    }
}
Example usage of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco:
the SolrInformationServer class, method indexNodes.
/**
 * Indexes a batch of nodes: removes documents for deleted/shard-moved nodes and (re)builds
 * documents for updated or unknown nodes, fetching repository metadata in bulk. If the bulk
 * pass fails for any reason, the whole batch is retried one node at a time via indexNode.
 *
 * @param nodes     the nodes reported by the tracker for this batch
 * @param overwrite whether add commands should overwrite existing documents
 * @param cascade   not used in this path; cascade updates are driven elsewhere
 * @throws IOException             on index access failure
 * @throws AuthenticationException if a repository call cannot be authenticated
 * @throws JSONException           if a repository response cannot be parsed
 */
@Override
public void indexNodes(List<Node> nodes, boolean overwrite, boolean cascade) throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());
        Map<Long, Node> nodeIdsToNodes = new HashMap<>();
        EnumMap<SolrApiNodeStatus, List<Long>> nodeStatusToNodeIds = new EnumMap<>(SolrApiNodeStatus.class);
        categorizeNodes(nodes, nodeIdsToNodes, nodeStatusToNodeIds);
        List<Long> deletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.DELETED));
        List<Long> shardDeletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
        List<Long> shardUpdatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
        List<Long> unknownNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UNKNOWN));
        List<Long> updatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UPDATED));
        if (!deletedNodeIds.isEmpty() || !shardDeletedNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty()) {
            // fix up any secondary paths
            List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
            // For all deleted nodes, fake the node metadata
            for (Long deletedNodeId : deletedNodeIds) {
                Node node = nodeIdsToNodes.get(deletedNodeId);
                NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
                nodeMetaDatas.add(nodeMetaData);
            }
            if (!unknownNodeIds.isEmpty()) {
                NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
                nmdp.setNodeIds(unknownNodeIds);
                nodeMetaDatas.addAll(repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE));
            }
            for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                // Fix: the original dereferenced node before any null check, and then
                // null-checked nodeMetaData (which can never be null here). Guard node itself.
                if (node == null) {
                    continue;
                }
                if (nodeMetaData.getTxnId() > node.getTxnId()) {
                    // it will be indexed later
                    continue;
                }
                try {
                    // Lock the node to ensure that no other trackers work with this node until this code completes.
                    if (!spinLock(nodeMetaData.getId(), 120000)) {
                        // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                        throw new Exception("Unable to acquire lock on nodeId:" + nodeMetaData.getId());
                    }
                    solrContentStore.removeDocFromContentStore(nodeMetaData);
                } finally {
                    unlock(nodeMetaData.getId());
                }
            }
            if (log.isDebugEnabled()) {
                log.debug(".. deleting");
            }
            // Remove all affected documents from the index in a single delete-by-query.
            DeleteUpdateCommand delDocCmd = new DeleteUpdateCommand(request);
            String query = this.cloud.getQuery(FIELD_DBID, OR, deletedNodeIds, shardDeletedNodeIds, shardUpdatedNodeIds, unknownNodeIds);
            delDocCmd.setQuery(query);
            processor.processDelete(delDocCmd);
        }
        if (!updatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty()) {
            log.info(".. updating");
            NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
            List<Long> nodeIds = new LinkedList<>();
            nodeIds.addAll(updatedNodeIds);
            nodeIds.addAll(unknownNodeIds);
            nodeIds.addAll(shardUpdatedNodeIds);
            nmdp.setNodeIds(nodeIds);
            // Fetches bulk metadata
            List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
            NEXT_NODE: for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                long start = System.nanoTime();
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                long nodeId = node.getId();
                try {
                    // Lock the node to ensure that no other trackers work with this node until this code completes.
                    if (!spinLock(nodeId, 120000)) {
                        // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                        throw new Exception("Unable to acquire lock on nodeId:" + nodeId);
                    }
                    if (nodeMetaData.getTxnId() > node.getTxnId()) {
                        // it will be indexed later
                        continue;
                    }
                    if (nodeIdsToNodes.get(nodeMetaData.getId()).getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED) {
                        // Node lives on another shard; only propagate cascade bookkeeping here.
                        if (nodeMetaData.getProperties().get(ContentModel.PROP_CASCADE_TX) != null) {
                            indexNonShardCascade(nodeMetaData);
                        }
                        continue;
                    }
                    AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                    addDocCmd.overwrite = overwrite;
                    // check index control: cm:isIndexed=false means the node must not be indexed.
                    Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
                    StringPropertyValue pValue = (StringPropertyValue) properties.get(ContentModel.PROP_IS_INDEXED);
                    if (pValue != null) {
                        Boolean isIndexed = Boolean.valueOf(pValue.getValue());
                        if (!isIndexed.booleanValue()) {
                            if (log.isDebugEnabled()) {
                                log.debug(".. clearing unindexed");
                            }
                            deleteNode(processor, request, node);
                            // Optionally record a stub "unindexed" document for the node.
                            SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_UNINDEXED_NODE);
                            addDocCmd.solrDoc = doc;
                            if (recordUnindexedNodes) {
                                solrContentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
                                processor.processAdd(addDocCmd);
                            }
                            long end = System.nanoTime();
                            this.trackerStats.addNodeTime(end - start);
                            continue NEXT_NODE;
                        }
                    }
                    // Make sure any unindexed or error doc is removed.
                    if (log.isDebugEnabled()) {
                        log.debug(".. deleting node " + node.getId());
                    }
                    deleteNode(processor, request, node);
                    SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
                    addToNewDocAndCache(nodeMetaData, doc);
                    addDocCmd.solrDoc = doc;
                    processor.processAdd(addDocCmd);
                    long end = System.nanoTime();
                    this.trackerStats.addNodeTime(end - start);
                } finally {
                    // release the lock on the node so other trackers can access the node.
                    unlock(nodeId);
                }
            }
            // Ends iteration over nodeMetadatas
        }
        // Ends checking for the existence of updated or unknown node ids
    } catch (Exception e) {
        log.error("SolrInformationServer problem", e);
        // Bulk version failed, so do one at a time.
        for (Node node : nodes) {
            this.indexNode(node, true);
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}
Example usage of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco:
the SolrInformationServer class, method getCascadeNodes.
/**
 * Finds the nodes in the local index tagged with any of the given cascade transaction ids
 * (via the indexed cm:cascadeTx property field) and re-fetches their metadata — paths and
 * ancestors only — from the repository.
 *
 * @param txnIds the cascade transaction ids to look up
 * @return metadata for each matching node; empty if nothing matches
 * @throws AuthenticationException if a repository call cannot be authenticated
 * @throws IOException             on index access failure
 * @throws JSONException           if a repository response cannot be parsed
 */
public List<NodeMetaData> getCascadeNodes(List<Long> txnIds) throws AuthenticationException, IOException, JSONException {
    List<FieldInstance> fieldInstances = AlfrescoSolrDataModel.getInstance().getIndexedFieldNamesForProperty(ContentModel.PROP_CASCADE_TX).getFields();
    FieldInstance fieldInstance = fieldInstances.get(0);
    RefCounted<SolrIndexSearcher> refCounted = null;
    Set<Long> childIds = new HashSet<>();
    try {
        refCounted = core.getSearcher();
        SolrIndexSearcher searcher = refCounted.get();
        String field = fieldInstance.getField();
        SchemaField schemaField = searcher.getSchema().getField(field);
        FieldType fieldType = schemaField.getType();
        // OR together one term query per transaction id, encoded as the field type stores it.
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        for (Long txnId : txnIds) {
            BytesRefBuilder bytesRefBuilder = new BytesRefBuilder();
            fieldType.readableToIndexed(txnId.toString(), bytesRefBuilder);
            TermQuery termQuery = new TermQuery(new Term(field, bytesRefBuilder.toBytesRef()));
            builder.add(new BooleanClause(termQuery, BooleanClause.Occur.SHOULD));
        }
        BooleanQuery booleanQuery = builder.build();
        DocListCollector collector = new DocListCollector();
        searcher.search(booleanQuery, collector);
        IntArrayList docList = collector.getDocs();
        int size = docList.size();
        // Only the id field is needed to resolve the matching DBIDs.
        Set<String> fields = new HashSet<>();
        fields.add(FIELD_SOLR4_ID);
        for (int i = 0; i < size; i++) {
            int docId = docList.get(i);
            Document document = searcher.doc(docId, fields);
            String id = document.getField(FIELD_SOLR4_ID).stringValue();
            TenantAclIdDbId ids = AlfrescoSolrDataModel.decodeNodeDocumentId(id);
            childIds.add(ids.dbId);
        }
    } finally {
        // Null-guard: getSearcher() may have thrown before refCounted was assigned;
        // an unconditional decref() would NPE here and mask the original failure.
        if (refCounted != null) {
            refCounted.decref();
        }
    }
    List<NodeMetaData> allNodeMetaDatas = new ArrayList<>();
    for (Long childId : childIds) {
        NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
        // The range [childId, childId] selects exactly one node.
        nmdp.setFromNodeId(childId);
        nmdp.setToNodeId(childId);
        nmdp.setIncludeAclId(false);
        nmdp.setIncludeAspects(false);
        nmdp.setIncludeChildAssociations(false);
        nmdp.setIncludeChildIds(true);
        nmdp.setIncludeNodeRef(false);
        nmdp.setIncludeOwner(false);
        nmdp.setIncludeParentAssociations(false);
        // We only care about the path and ancestors (which is included) for this case
        nmdp.setIncludePaths(true);
        nmdp.setIncludeProperties(false);
        nmdp.setIncludeType(false);
        nmdp.setIncludeTxnId(true);
        // Gets only one
        List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, 1);
        allNodeMetaDatas.addAll(nodeMetaDatas);
    }
    return allNodeMetaDatas;
}
Example usage of org.alfresco.solr.client.NodeMetaDataParameters in the SearchServices project by Alfresco:
the SolrInformationServer class, method recreateSolrDoc.
/**
 * Rebuilds a Solr document for a node from repository metadata after a content-store cache miss.
 *
 * @param dbId   the node's database id
 * @param tenant the tenant the node belongs to (used for logging only)
 * @return the rebuilt document, or {@code null} when the repository no longer knows the node
 * @throws AuthenticationException if the repository call cannot be authenticated
 * @throws IOException             on communication failure
 * @throws JSONException           if the repository response cannot be parsed
 */
private SolrInputDocument recreateSolrDoc(long dbId, String tenant) throws AuthenticationException, IOException, JSONException {
    // Ask the repository for exactly this node: the range [dbId, dbId] selects one node.
    NodeMetaDataParameters params = new NodeMetaDataParameters();
    params.setFromNodeId(dbId);
    params.setToNodeId(dbId);
    List<NodeMetaData> metaDataList = repositoryClient.getNodesMetaData(params, Integer.MAX_VALUE);
    if (metaDataList.isEmpty()) {
        // we get an empty list if a node is deleted
        if (log.isDebugEnabled()) {
            log.debug("Failed to recreate Solr doc with tenant [" + tenant + "] and dbId [" + dbId + "], " + "because node not found in repository.");
        }
        return null;
    }
    NodeMetaData metaData = metaDataList.get(0);
    SolrInputDocument rebuilt = createNewDoc(metaData, DOC_TYPE_NODE);
    addFieldsToDoc(metaData, rebuilt);
    Map<QName, PropertyValue> properties = metaData.getProperties();
    boolean contentIndexed = isContentIndexedForNode(properties);
    // Cached doc is null for this method because it is a recreate after a cache miss.
    addPropertiesToDoc(properties, contentIndexed, rebuilt, null, transformContent);
    return rebuilt;
}
Aggregations