Use of org.alfresco.solr.client.AclChangeSet in project SearchServices by Alfresco.
The class DistributedDbidRangeAlfrescoSolrTrackerTest, method testDbIdRange.
@Test
public void testDbIdRange() throws Exception {
putHandleDefaults();
int numAcls = 250;
AclChangeSet bulkAclChangeSet = getAclChangeSet(numAcls);
List<Acl> bulkAcls = new ArrayList<>();
List<AclReaders> bulkAclReaders = new ArrayList<>();
for (int i = 0; i < numAcls; i++) {
Acl bulkAcl = getAcl(bulkAclChangeSet);
bulkAcls.add(bulkAcl);
bulkAclReaders.add(getAclReaders(bulkAclChangeSet, bulkAcl, list("joel" + bulkAcl.getId()), list("phil" + bulkAcl.getId()), null));
}
indexAclChangeSet(bulkAclChangeSet, bulkAcls, bulkAclReaders);
int numNodes = 150;
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
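// Assumption: getTransaction(deletes, updates) builds a mock transaction reporting 0 deleted and numNodes updated nodes.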
Transaction bigTxn = getTransaction(0, numNodes);
for (int i = 0; i < numNodes; i++) {
int aclIndex = i % numAcls;
Node node = getNode((long) i, bigTxn, bulkAcls.get(aclIndex), Node.SolrApiNodeStatus.UPDATED);
nodes.add(node);
NodeMetaData nodeMetaData = getNodeMetaData(node, bigTxn, bulkAcls.get(aclIndex), "mike", null, false);
nodeMetaDatas.add(nodeMetaData);
}
indexTransaction(bigTxn, nodes, nodeMetaDatas);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), numNodes, 100000);
waitForDocCountAllCores(new TermQuery(new Term(FIELD_DOC_TYPE, SolrInformationServer.DOC_TYPE_ACL)), numAcls, 80000);
// The test framework shards by DBID range: shard 0 covers DBIDs [0, 100), shard 1 covers [100, 200), and so on (see the sketch after this method).
assertShardCount(0, new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 100);
assertShardCount(1, new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 50);
}
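The two shard-count assertions follow from DBID-range sharding: DBIDs 0-149 were indexed, so the first 100 land on shard 0 and the remaining 50 on shard 1. A minimal sketch of the range arithmetic, assuming fixed-width, half-open ranges as in the test framework (shardOf is a hypothetical helper, not part of SearchServices):
// Hypothetical helper: map a DBID to a shard index under DBID-range sharding
// with fixed-width, half-open ranges [0, width), [width, 2 * width), ...
static int shardOf(long dbid, long rangeWidth) {
    return (int) (dbid / rangeWidth);
}
// With rangeWidth = 100: DBIDs 0..99 fall on shard 0 (100 nodes) and DBIDs 100..149 on shard 1 (50 nodes),
// matching assertShardCount(0, ..., 100) and assertShardCount(1, ..., 50) above.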
Use of org.alfresco.solr.client.AclChangeSet in project SearchServices by Alfresco.
The class DistributedAlfrescoSolrFingerPrintTest, method testFingerPrint.
@Test
public void testFingerPrint() throws Exception {
handle.put("explain", SKIPVAL);
handle.put("timestamp", SKIPVAL);
handle.put("score", SKIPVAL);
handle.put("wt", SKIP);
handle.put("distrib", SKIP);
handle.put("shards.qt", SKIP);
handle.put("shards", SKIP);
// No longer used; can be removed in Solr 6.
handle.put("spellcheck-extras", SKIP);
handle.put("q", SKIP);
handle.put("maxScore", SKIPVAL);
handle.put("_version_", SKIP);
handle.put("_original_parameters_", SKIP);
/*
* Create and index an AclChangeSet.
*/
AclChangeSet aclChangeSet = getAclChangeSet(1);
Acl acl = getAcl(aclChangeSet);
Acl acl2 = getAcl(aclChangeSet);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCountAllCores(waitForQuery, 1, 80000);
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 4);
// Next create four nodes to update in the transaction
Node node1 = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node node2 = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node node3 = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node node4 = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData nodeMetaData1 = getNodeMetaData(node1, txn, acl, "mike", null, false);
NodeMetaData nodeMetaData2 = getNodeMetaData(node2, txn, acl, "mike", null, false);
NodeMetaData nodeMetaData3 = getNodeMetaData(node3, txn, acl, "mike", null, false);
NodeMetaData nodeMetaData4 = getNodeMetaData(node4, txn, acl, "mike", null, false);
List<String> content = new ArrayList<>();
int[] sizes = { 2000, 1000, 1500, 750 };
Random r = new Random(1);
String token1 = Integer.toString(Math.abs(r.nextInt()));
for (int i = 0; i < 4; i++) {
Random rand = new Random(1);
StringBuilder buf = new StringBuilder();
int size = sizes[i];
for (int s = 0; s < size; s++) {
if (s > 0) {
buf.append(" ");
}
buf.append(Integer.toString(Math.abs(rand.nextInt())));
}
content.add(buf.toString());
}
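// Because every document is generated from the same Random seed, each shorter document is a token-prefix of the longer ones:
// node1 has 2000 tokens, node3 shares 1500 of them (75%), node2 shares 1000 (50%) and node4 shares 750 (37.5%).
// This makes the fingerprint similarities asserted below predictable.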
// Index the transaction, nodes, and nodeMetaDatas together with the generated content.
// Unlike the other tests, the content here is supplied explicitly rather than auto-generated by the test framework.
indexTransaction(txn, list(node1, node2, node3, node4), list(nodeMetaData1, nodeMetaData2, nodeMetaData3, nodeMetaData4), content);
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCountAllCores(waitForQuery, 1, 80000);
/*
* Query the index for the content
*/
waitForDocCountAllCores(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, 80000);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", token1)), 4, 80000);
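// FINGERPRINT:<DBID> (or <node UUID>) appears to rank documents by fingerprint overlap with the given node.
// Assumption based on the assertions below: an optional _<NN> suffix (e.g. _70) filters to documents with at least NN% overlap.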
QueryResponse response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + node1.getId(), "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
SolrDocumentList docs = response.getResults();
assertTrue(docs.getNumFound() == 4);
SolrDocument doc0 = docs.get(0);
long dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
SolrDocument doc1 = docs.get(1);
long dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
SolrDocument doc2 = docs.get(2);
long dbid2 = (long) doc2.getFieldValue("DBID");
assertTrue(dbid2 == node2.getId());
SolrDocument doc3 = docs.get(3);
long dbid3 = (long) doc3.getFieldValue("DBID");
assertTrue(dbid3 == node4.getId());
response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + node1.getId() + "_70", "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
docs = response.getResults();
assertTrue(docs.getNumFound() == 2);
doc0 = docs.get(0);
dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
doc1 = docs.get(1);
dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + node1.getId() + "_45", "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
docs = response.getResults();
assertTrue(docs.getNumFound() == 3);
doc0 = docs.get(0);
dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
doc1 = docs.get(1);
dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
doc2 = docs.get(2);
dbid2 = (long) doc2.getFieldValue("DBID");
assertTrue(dbid2 == node2.getId());
response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + nodeMetaData1.getNodeRef().getId(), "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
docs = response.getResults();
assertTrue(docs.getNumFound() == 4);
doc0 = docs.get(0);
dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
doc1 = docs.get(1);
dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
doc2 = docs.get(2);
dbid2 = (long) doc2.getFieldValue("DBID");
assertTrue(dbid2 == node2.getId());
doc3 = docs.get(3);
dbid3 = (long) doc3.getFieldValue("DBID");
assertTrue(dbid3 == node4.getId());
response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + nodeMetaData1.getNodeRef().getId() + "_70", "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
docs = response.getResults();
assertTrue(docs.getNumFound() == 2);
doc0 = docs.get(0);
dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
doc1 = docs.get(1);
dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
response = query(getDefaultTestClient(), true, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [\"joel\"], \"tenants\": []}", params("q", "FINGERPRINT:" + nodeMetaData1.getNodeRef().getId() + "_45", "qt", "/afts", "shards.qt", "/afts", "start", "0", "fl", "DBID,score", "rows", "100"));
docs = response.getResults();
assertTrue(docs.getNumFound() == 3);
doc0 = docs.get(0);
dbid0 = (long) doc0.getFieldValue("DBID");
assertTrue(dbid0 == node1.getId());
doc1 = docs.get(1);
dbid1 = (long) doc1.getFieldValue("DBID");
assertTrue(dbid1 == node3.getId());
doc2 = docs.get(2);
dbid2 = (long) doc2.getFieldValue("DBID");
assertTrue(dbid2 == node2.getId());
}
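The expected hit counts can be sanity-checked from the generated content. A minimal sketch of the arithmetic, assuming overlap is measured as the fraction of the query document's tokens also present in the candidate (this mirrors the test data above, not the actual fingerprint implementation):
// All documents share the same random seed, so each is a prefix of the longest one.
int[] sizes = { 2000, 1000, 1500, 750 }; // node1, node2, node3, node4
for (int i = 0; i < sizes.length; i++) {
    double overlap = 100.0 * Math.min(sizes[i], sizes[0]) / sizes[0];
    System.out.println("node" + (i + 1) + ": " + overlap + "% overlap with node1");
}
// Prints 100.0, 50.0, 75.0 and 37.5, so FINGERPRINT:<node1>_70 should match 2 documents (node1, node3)
// and FINGERPRINT:<node1>_45 should match 3 (node1, node3, node2), as asserted above.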
Use of org.alfresco.solr.client.AclChangeSet in project SearchServices by Alfresco.
The class AlfrescoSolrTrackerExceptionTest, method testTrackers.
@Test
public void testTrackers() throws Exception {
/*
* Create and index an AclChangeSet.
*/
logger.info("######### Starting tracker test ###########");
AclChangeSet aclChangeSet = getAclChangeSet(1, 1);
Acl acl = getAcl(aclChangeSet);
Acl acl2 = getAcl(aclChangeSet);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
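// Assumption: the test fixture starts the trackers with SOLRAPIQueueClient already configured to throw exceptions,
// so this sleep gives them time to fail before exception throwing is switched off below.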
Thread.sleep(20000);
SOLRAPIQueueClient.setThrowException(false);
logger.info("#################### Stop throwing exceptions ##############################");
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed First Test ##############################");
// assert(false);
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 2, 1);
// Next create three nodes to update in the transaction
Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
logger.info("######### error node:" + errorNode.getId());
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
// The errorMetaData will cause an exception.
NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(txn, list(errorNode, folderNode, fileNode), list(errorMetaData, folderMetaData, fileMetaData));
logger.info("#################### Start throwing exceptions ##############################");
SOLRAPIQueueClient.setThrowException(true);
Thread.sleep(10000);
SOLRAPIQueueClient.setThrowException(false);
logger.info("#################### Stop throwing exceptions ##############################");
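// With exceptions no longer thrown, the trackers should retry and index the transaction queued above;
// the TX state stamp query below confirms it eventually reaches the index.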
// Check for the TXN state stamp.
logger.info("#################### Started Second Test ##############################");
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed Second Test ##############################");
/*
* Query the index for the content
*/
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Third Test ##############################");
ModifiableSolrParams params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
SolrServletRequest req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"joel\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourth Test ##############################");
// Check for the error doc
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Fifth Test ##############################");
// Mark the folder as needing cascade
Transaction txn1 = getTransaction(0, 1, 2);
// Update the properties on the Node and NodeMetaData to simulate an update to the Node.
folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX, new StringPropertyValue(Long.toString(txn1.getId())));
// Update the txnId
folderNode.setTxnId(txn1.getId());
folderMetaData.setTxnId(txn1.getId());
// Change the ancestor on the file so we can verify that the cascade update reaches it.
NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
fileMetaData.setAncestors(ancestors(nodeRef));
// This will add the PROP_CASCADE_TX property to the folder.
logger.info("################### ADDING CASCADE TRANSACTION #################");
indexTransaction(txn1, list(folderNode), list(folderMetaData));
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Sixth Test ##############################");
TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
waitForDocCount(termQuery1, 1, MAX_WAIT_TIME);
params = new ModifiableSolrParams();
params.add("q", QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"");
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventh Test ##############################");
// Check that both documents have been indexed and have content.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
logger.info("#################### Passed Eighth Test ##############################");
// Try bulk loading
Transaction txn2 = getTransaction(0, 550);
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
for (int i = 0; i < 550; i++) {
Node n = getNode(txn2, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txn2, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodes.add(n);
nodeMetaDatas.add(nm);
}
logger.info("############################ Bulk Nodes:" + nodes.size());
indexTransaction(txn2, nodes, nodeMetaDatas);
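// 550 bulk nodes plus the folderNode and fileNode indexed earlier (the error node has no content document yet) gives 552.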
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 552, MAX_WAIT_TIME);
logger.info("#################### Passed Ninth Test ##############################");
for (int i = 0; i < 1000; i++) {
Transaction txnX = getTransaction(0, 1);
List<Node> nodesX = new ArrayList<>();
List<NodeMetaData> nodeMetaDatasX = new ArrayList<>();
Node n = getNode(txnX, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txnX, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodesX.add(n);
nodeMetaDatasX.add(nm);
indexTransaction(txnX, nodesX, nodeMetaDatasX);
}
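// 552 existing content documents plus 1000 single-node transactions gives 1552.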
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1552, MAX_WAIT_TIME);
logger.info("#################### Passed Tenth Test ##############################");
// Test the maintenance methods
fileMetaData.setOwner("amy");
reindexTransactionId(txn.getId());
folderMetaData.setOwner("jill");
reindexNodeId(folderNode.getId());
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Eleventh Test ##############################");
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Twelfth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"amy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + folderNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"jill\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + folderNode.getId() + "']");
logger.info("#################### Passed Fifteenth Test ##############################");
List<String> readers = aclReaders.getReaders();
// Change the aclReader
readers.set(0, "andy");
indexAclId(acl.getId());
List<String> readers2 = aclReaders2.getReaders();
// Change the aclReader
readers2.set(0, "ice");
reindexAclId(acl2.getId());
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 1, MAX_WAIT_TIME);
// Ice should have replaced jim in acl2.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 0, MAX_WAIT_TIME);
logger.info("#################### Passed Sixteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"andy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventeenth Test ##############################");
// Change the aclReader
readers.set(0, "alan");
// Change the aclReader
readers2.set(0, "paul");
// This should replace "andy" and "ice" with "alan" and "paul"
reindexAclChangeSetId(aclChangeSet.getId());
// Test that "alan" and "paul" are in the index
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 1, MAX_WAIT_TIME);
// Test that "andy" and "ice" are removed
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 0, MAX_WAIT_TIME);
// Test Maintenance acl purge
purgeAclId(acl2.getId());
// Test Maintenance node purge
purgeNodeId(fileNode.getId());
purgeTransactionId(txn2.getId());
// paul should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
// Reflects the purged node and transaction: 1552 - 550 (txn2 nodes) - 1 (fileNode) = 1001 content documents remain.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1001, MAX_WAIT_TIME);
logger.info("#################### Passed Eighteenth Test ##############################");
purgeAclChangeSetId(aclChangeSet.getId());
// alan should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 0, MAX_WAIT_TIME);
// Fix the error node
errorMetaData.setNodeRef(new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID()));
// Reload the error node.
logger.info("Retry the error node");
retry();
// The error in the index should disappear.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 0, MAX_WAIT_TIME);
// And the previously failing node's content should now be present.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(errorNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Nineteenth Test ##############################");
// assert(false);
}
Use of org.alfresco.solr.client.AclChangeSet in project SearchServices by Alfresco.
The class AclTracker, method indexAclChangeSets.
protected void indexAclChangeSets() throws AuthenticationException, IOException, JSONException {
boolean requiresCommit = false;
while (aclChangeSetsToIndex.peek() != null) {
Long aclChangeSetId = aclChangeSetsToIndex.poll();
if (aclChangeSetId != null) {
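// Assumption on the client API: the arguments are (fromCommitTime, minId, toCommitTime, maxId, maxResults),
// i.e. fetch at most one change set whose id is exactly aclChangeSetId.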
AclChangeSets aclChangeSets = client.getAclChangeSets(null, aclChangeSetId, null, aclChangeSetId + 1, 1);
if ((aclChangeSets.getAclChangeSets().size() > 0) && aclChangeSetId.equals(aclChangeSets.getAclChangeSets().get(0).getId())) {
AclChangeSet changeSet = aclChangeSets.getAclChangeSets().get(0);
List<Acl> acls = client.getAcls(Collections.singletonList(changeSet), null, Integer.MAX_VALUE);
for (Acl acl : acls) {
List<AclReaders> readers = client.getAclReaders(Collections.singletonList(acl));
indexAcl(readers, false);
}
this.infoSrv.indexAclTransaction(changeSet, false);
requiresCommit = true;
}
}
checkShutdown();
}
if (requiresCommit) {
checkShutdown();
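// Note: the commit below is currently disabled, so requiresCommit only gates this extra checkShutdown() call.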
// this.infoSrv.commit();
}
}
Use of org.alfresco.solr.client.AclChangeSet in project SearchServices by Alfresco.
The class AclTracker, method indexBatchOfChangeSets.
private int indexBatchOfChangeSets(List<AclChangeSet> changeSetBatch) throws AuthenticationException, IOException, JSONException {
int aclCount = 0;
ArrayList<AclChangeSet> nonEmptyChangeSets = new ArrayList<AclChangeSet>(changeSetBatch.size());
for (AclChangeSet set : changeSetBatch) {
if (set.getAclCount() > 0) {
nonEmptyChangeSets.add(set);
}
}
ArrayList<Acl> aclBatch = new ArrayList<Acl>();
List<Acl> acls = client.getAcls(nonEmptyChangeSets, null, Integer.MAX_VALUE);
for (Acl acl : acls) {
if (log.isDebugEnabled()) {
log.debug(acl.toString());
}
aclBatch.add(acl);
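// Note: the flush below triggers only once the batch size exceeds aclBatchSize,
// so each scheduled AclIndexWorkerRunnable receives aclBatchSize + 1 ACLs.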
if (aclBatch.size() > aclBatchSize) {
aclCount += aclBatch.size();
AclIndexWorkerRunnable aiwr = new AclIndexWorkerRunnable(this.threadHandler, aclBatch);
this.threadHandler.scheduleTask(aiwr);
aclBatch = new ArrayList<Acl>();
}
}
if (aclBatch.size() > 0) {
aclCount += aclBatch.size();
AclIndexWorkerRunnable aiwr = new AclIndexWorkerRunnable(this.threadHandler, aclBatch);
this.threadHandler.scheduleTask(aiwr);
aclBatch = new ArrayList<Acl>();
}
return aclCount;
}