Use of org.alfresco.solr.client.AclReaders in project SearchServices by Alfresco.
From the class AclTracker, method indexAcls:
protected void indexAcls() throws AuthenticationException, IOException, JSONException {
while (aclsToIndex.peek() != null) {
Long aclId = aclsToIndex.poll();
if (aclId != null) {
// System.out.println("############## Indexing ACL ID:"+aclId);
Acl acl = new Acl(0, aclId);
List<AclReaders> readers = client.getAclReaders(Collections.singletonList(acl));
// AclReaders r = readers.get(0);
// System.out.println("############## READERS ID:"+r.getId()+":"+r.getReaders());
indexAcl(readers, false);
}
checkShutdown();
}
}
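The commented-out lines above show what each AclReaders exposes: an id and a list of reader authorities. Below is a minimal sketch of inspecting the result before indexing; it assumes the same client field and Acl(aclChangeSetId, aclId) constructor used in indexAcls, while the logAclReaders name and the slf4j-style LOGGER are hypothetical:
protected void logAclReaders(Long aclId) throws AuthenticationException, IOException, JSONException {
    // Fetch the readers for a single ACL, exactly as indexAcls does one id at a time.
    List<AclReaders> readers = client.getAclReaders(Collections.singletonList(new Acl(0, aclId)));
    for (AclReaders r : readers) {
        // getId() and getReaders() are the accessors hinted at in the commented-out debug line above.
        LOGGER.debug("ACL {} readers: {}", r.getId(), r.getReaders());
    }
}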
Use of org.alfresco.solr.client.AclReaders in project SearchServices by Alfresco.
From the class AclTracker, method checkAcl:
public AclReport checkAcl(Long aclid) {
AclReport aclReport = new AclReport();
aclReport.setAclId(aclid);
try {
List<AclReaders> readers = client.getAclReaders(Collections.singletonList(new Acl(0, aclid)));
aclReport.setExistsInDb(readers.size() == 1);
} catch (IOException | JSONException | AuthenticationException e) {
aclReport.setExistsInDb(false);
}
// In Index
return this.infoSrv.checkAclInIndex(aclid, aclReport);
}
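A hypothetical caller of checkAcl. The AclTracker would normally be obtained from the core's TrackerRegistry, using the same getTrackersForCore lookup that appears in the test further down this page; trackerRegistry, coreName and the ACL id 42L are placeholders:
AclTracker aclTracker = null;
for (Tracker tracker : trackerRegistry.getTrackersForCore(coreName)) {
    if (tracker instanceof AclTracker) {
        aclTracker = (AclTracker) tracker;
    }
}
if (aclTracker != null) {
    // The report says whether the ACL exists in the repository (DB) and in the Solr index.
    AclReport report = aclTracker.checkAcl(42L);
}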
Use of org.alfresco.solr.client.AclReaders in project SearchServices by Alfresco.
From the class AlfrescoSolrTrackerRollbackTest, method testTrackers:
@Test
public void testTrackers() throws Exception {
AlfrescoCoreAdminHandler alfrescoCoreAdminHandler = (AlfrescoCoreAdminHandler) h.getCore().getCoreContainer().getMultiCoreHandler();
/*
* Create and index an AclChangeSet.
*/
AclChangeSet aclChangeSet = getAclChangeSet(1, 1);
Acl acl = getAcl(aclChangeSet);
Acl acl2 = getAcl(aclChangeSet);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
TrackerRegistry trackerRegistry = alfrescoCoreAdminHandler.getTrackerRegistry();
Collection<Tracker> trackers = trackerRegistry.getTrackersForCore(h.getCore().getName());
MetadataTracker metadataTracker = null;
CommitTracker commitTracker = null;
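// Locate the metadata and commit trackers registered for this core; both are needed below to stage and then roll back an uncommitted transaction.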
for (Tracker tracker : trackers) {
if (tracker instanceof MetadataTracker) {
metadataTracker = (MetadataTracker) tracker;
} else if (tracker instanceof CommitTracker) {
commitTracker = (CommitTracker) tracker;
}
}
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 3, 1);
// Next create three nodes to update for the transaction
Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
// The errorNodeMetaData will cause an exception.
NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(txn, list(errorNode, folderNode, fileNode), list(errorMetaData, folderMetaData, fileMetaData));
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);
// Stop the commit tracker
// This will allow the metadata tracker to index the next transaction and leave it uncommitted in the index.
commitTracker.getRunLock().acquire();
Transaction rollbackTxn = getTransaction(0, 1, 2);
Node rollbackNode = getNode(rollbackTxn, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData rollbackMetaData = getNodeMetaData(rollbackNode, rollbackTxn, acl, "mike", null, false);
indexTransaction(rollbackTxn, list(rollbackNode), list(rollbackMetaData));
long cycles = metadataTracker.getTrackerState().getTrackerCycles();
// Wait three tracker cycles
while (metadataTracker.getTrackerState().getTrackerCycles() < cycles + 3) {
Thread.sleep(1000);
}
// Take the rollback transaction out of the queue so it doesn't get re-indexed after the rollback.
// Its absence from the index then proves the transaction really was rolled back.
SOLRAPIQueueClient.transactionQueue.remove(rollbackTxn);
metadataTracker.setRollback(true);
commitTracker.getRunLock().release();
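// Block until the commit tracker reports that the rollback has been performed.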
while (commitTracker.getRollbackCount() == 0) {
Thread.sleep(1000);
}
// The rollback occurred
// Let's add another node and acl
AclChangeSet afterRollbackAclChangeSet = getAclChangeSet(1, 10);
Acl afterRollbackAcl = getAcl(aclChangeSet);
AclReaders afterRollbackAclReaders = getAclReaders(afterRollbackAclChangeSet, afterRollbackAcl, list("joel"), list("phil"), null);
indexAclChangeSet(afterRollbackAclChangeSet, list(afterRollbackAcl), list(afterRollbackAclReaders));
Transaction afterRollbackTxn = getTransaction(0, 1, 3);
Node afterRollbackNode = getNode(afterRollbackTxn, acl, Node.SolrApiNodeStatus.UPDATED);
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData afterRollbackMetaData = getNodeMetaData(afterRollbackNode, afterRollbackTxn, acl, "mike", null, false);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(afterRollbackTxn, list(afterRollbackNode), list(afterRollbackMetaData));
// Wait for the node to appear
// Assert the rolled back transaction is not in the index.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 3, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(afterRollbackNode.getId()))), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(rollbackNode.getId()))), 0, MAX_WAIT_TIME);
// Check for the ACL state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, afterRollbackAclChangeSet.getId(), afterRollbackAclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
}
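Condensed, the rollback choreography this test drives is: pause the commit tracker, index a transaction that will stay uncommitted, flag the rollback, then let the commit tracker run again. A sketch using the same tracker objects as above (InterruptedException handling omitted):
commitTracker.getRunLock().acquire();      // stop commits so the next indexed transaction stays uncommitted
try {
    // ... index the transaction that should be discarded ...
    metadataTracker.setRollback(true);     // request a rollback to the last committed state
} finally {
    commitTracker.getRunLock().release();  // let the commit tracker run and act on the rollback flag
}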
Use of org.alfresco.solr.client.AclReaders in project SearchServices by Alfresco.
From the class AlfrescoSolrTrackerTest, method testTrackers:
@Test
public void testTrackers() throws Exception {
/*
* Create and index an AclChangeSet.
*/
logger.info("######### Starting tracker test ###########");
AclChangeSet aclChangeSet = getAclChangeSet(1);
Acl acl = getAcl(aclChangeSet);
// Test with long value
Acl acl2 = getAcl(aclChangeSet, Long.MAX_VALUE - 10);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed First Test ##############################");
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 2);
// Next create three nodes to update for the transaction
Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
logger.info("######### error node:" + errorNode.getId());
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
// The errorNodeMetaData will cause an exception.
NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(txn, list(errorNode, folderNode, fileNode), list(errorMetaData, folderMetaData, fileMetaData));
// Check for the TXN state stamp.
logger.info("#################### Started Second Test ##############################");
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed Second Test ##############################");
/*
* Query the index for the content
*/
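// The readers supplied with the second ACL ("jim") should now be searchable in the reader field.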
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Third Test ##############################");
ModifiableSolrParams params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
SolrServletRequest req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"joel\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourth Test ##############################");
// Check for the error doc
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Fifth Test ##############################");
// Mark the folder as needing cascade
Transaction txn1 = getTransaction(0, 1);
// Update the properties on the Node and NodeMetaData to simulate an update to the Node.
folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX, new StringPropertyValue(Long.toString(txn1.getId())));
// Update the txnId
folderNode.setTxnId(txn1.getId());
folderMetaData.setTxnId(txn1.getId());
// Change the ancestor on the file just to see if it's been updated
NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
fileMetaData.setAncestors(ancestors(nodeRef));
// This will add the PROP_CASCADE_TX property to the folder.
logger.info("################### ADDING CASCADE TRANSACTION #################");
indexTransaction(txn1, list(folderNode), list(folderMetaData));
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Sixth Test ##############################");
TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
waitForDocCount(termQuery1, 1, MAX_WAIT_TIME);
params = new ModifiableSolrParams();
params.add("q", QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"");
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventh Test ##############################");
// Check that both documents have been indexed and have content.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
logger.info("#################### Passed Eighth Test ##############################");
// Add document with isContentIndexed=false
Transaction txnNoContent = getTransaction(0, 1);
Node noContentNode = getNode(txnNoContent, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData noContentMetaData = getNodeMetaData(noContentNode, txnNoContent, acl, "mike", null, false);
noContentMetaData.getProperties().put(ContentModel.PROP_IS_CONTENT_INDEXED, new StringPropertyValue("false"));
noContentMetaData.getProperties().put(ContentModel.PROP_CONTENT, new ContentPropertyValue(Locale.UK, 298L, "UTF-8", "text/json", null));
indexTransaction(txnNoContent, list(noContentNode), list(noContentMetaData));
// This tests that the mime type has been added for this document. It is the only document with text/json in the index.
waitForDocCount(new TermQuery(new Term("content@s__mimetype@{http://www.alfresco.org/model/content/1.0}content", "text/json")), 1, MAX_WAIT_TIME);
// Many of the tests beyond this point rely on a specific count of documents in the index that have content.
// This document should not have had the content indexed so the tests following will pass.
// If the content had been indexed the tests following this one would have failed.
// This proves that the ContentModel.PROP_IS_CONTENT_INDEXED property is being followed by the tracker
// Try bulk loading
Transaction txn2 = getTransaction(0, 550);
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
for (int i = 0; i < 550; i++) {
Node n = getNode(txn2, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txn2, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodes.add(n);
nodeMetaDatas.add(nm);
}
logger.info("############################ Bulk Nodes:" + nodes.size());
indexTransaction(txn2, nodes, nodeMetaDatas);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 552, MAX_WAIT_TIME);
logger.info("#################### Passed Ninth Test ##############################");
for (int i = 0; i < 1000; i++) {
Transaction txnX = getTransaction(0, 1);
List<Node> nodesX = new ArrayList<>();
List<NodeMetaData> nodeMetaDatasX = new ArrayList<>();
Node n = getNode(txnX, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txnX, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodesX.add(n);
nodeMetaDatasX.add(nm);
indexTransaction(txnX, nodesX, nodeMetaDatasX);
}
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1552, MAX_WAIT_TIME);
logger.info("#################### Passed Tenth Test ##############################");
// Test the maintenance methods
fileMetaData.setOwner("amy");
reindexTransactionId(txn.getId());
folderMetaData.setOwner("jill");
reindexNodeId(folderNode.getId());
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Eleventh Test ##############################");
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Twelth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"amy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework
params.add("q", "t1:" + folderNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"jill\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + folderNode.getId() + "']");
logger.info("#################### Passed Fifteenth Test ##############################");
List<String> readers = aclReaders.getReaders();
// Change the aclReader
readers.set(0, "andy");
indexAclId(acl.getId());
List<String> readers2 = aclReaders2.getReaders();
// Change the aclReader
readers2.set(0, "ice");
reindexAclId(acl2.getId());
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 1, MAX_WAIT_TIME);
// Ice should have replaced jim in acl2.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 0, MAX_WAIT_TIME);
logger.info("#################### Passed Sixteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"andy\"], \"tenants\": [ \"\" ]}");
// FIX ME assertQ(req, "*[count(//doc)=1]","//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventeenth Test ##############################");
// Change the aclReader
readers.set(0, "alan");
// Change the aclReader
readers2.set(0, "paul");
// This should replace "andy" and "ice" with "alan" and "paul"
reindexAclChangeSetId(aclChangeSet.getId());
// Test that "alan" and "paul" are in the index
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 1, MAX_WAIT_TIME);
// Test that "andy" and "ice" are removed
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 0, MAX_WAIT_TIME);
// Test Maintenance acl purge
purgeAclId(acl2.getId());
// Test Maintenance node purge
purgeNodeId(fileNode.getId());
purgeTransactionId(txn2.getId());
// paul should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
// Reflects the purged node and transaction
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1001, MAX_WAIT_TIME);
logger.info("#################### Passed Eighteenth Test ##############################");
purgeAclChangeSetId(aclChangeSet.getId());
// alan should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 0, MAX_WAIT_TIME);
// Fix the error node
errorMetaData.setNodeRef(new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID()));
// Reload the error node.
logger.info("Retry the error node");
retry();
// The error in the index should disappear.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 0, MAX_WAIT_TIME);
// And the error node should be present
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(errorNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Nineteenth Test ##############################");
}
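For quick reference, these are the index maintenance hooks exercised above, gathered in one place; the arguments are the same objects used in the test and this recap is not additional test code:
reindexTransactionId(txn.getId());            // rebuild every node indexed by a transaction
reindexNodeId(folderNode.getId());            // rebuild a single node
reindexAclId(acl2.getId());                   // rebuild a single ACL document
reindexAclChangeSetId(aclChangeSet.getId());  // rebuild every ACL in a change set
purgeAclId(acl2.getId());                     // remove a single ACL from the index
purgeNodeId(fileNode.getId());                // remove a single node from the index
purgeTransactionId(txn2.getId());             // remove every node indexed by a transaction
purgeAclChangeSetId(aclChangeSet.getId());    // remove every ACL in a change set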
Use of org.alfresco.solr.client.AclReaders in project SearchServices by Alfresco.
From the class DistributedDbidRangeAlfrescoSolrTrackerTest, method testDbIdRange:
@Test
public void testDbIdRange() throws Exception {
putHandleDefaults();
int numAcls = 250;
AclChangeSet bulkAclChangeSet = getAclChangeSet(numAcls);
List<Acl> bulkAcls = new ArrayList<>();
List<AclReaders> bulkAclReaders = new ArrayList<>();
for (int i = 0; i < numAcls; i++) {
Acl bulkAcl = getAcl(bulkAclChangeSet);
bulkAcls.add(bulkAcl);
bulkAclReaders.add(getAclReaders(bulkAclChangeSet, bulkAcl, list("joel" + bulkAcl.getId()), list("phil" + bulkAcl.getId()), null));
}
indexAclChangeSet(bulkAclChangeSet, bulkAcls, bulkAclReaders);
int numNodes = 150;
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
Transaction bigTxn = getTransaction(0, numNodes);
for (int i = 0; i < numNodes; i++) {
int aclIndex = i % numAcls;
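// The explicit first argument becomes the node's DBID, so ids 0-149 fall into the 0-100 and 100-200 DBID ranges asserted below.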
Node node = getNode((long) i, bigTxn, bulkAcls.get(aclIndex), Node.SolrApiNodeStatus.UPDATED);
nodes.add(node);
NodeMetaData nodeMetaData = getNodeMetaData(node, bigTxn, bulkAcls.get(aclIndex), "mike", null, false);
nodeMetaDatas.add(nodeMetaData);
}
indexTransaction(bigTxn, nodes, nodeMetaDatas);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), numNodes, 100000);
waitForDocCountAllCores(new TermQuery(new Term(FIELD_DOC_TYPE, SolrInformationServer.DOC_TYPE_ACL)), numAcls, 80000);
// The test framework has ranges 0-100, 100-200, ...
assertShardCount(0, new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 100);
assertShardCount(1, new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 50);
}
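The shard assertions rely on DBID range sharding: with contiguous ranges of width 100, the shard owning a node is just its DBID divided by the range width. A hypothetical helper (not part of the test framework) capturing that arithmetic:
// Illustrative only: maps a DBID onto a shard index for fixed-width, contiguous
// DBID ranges (0-100, 100-200, ...), matching the layout described in the comment above.
static int shardForDbid(long dbid, int rangeWidth) {
    return (int) (dbid / rangeWidth);
}
With rangeWidth 100, the 150 nodes indexed with DBIDs 0-149 split into 100 on shard 0 and 50 on shard 1, which is exactly what the two assertShardCount calls expect.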