Use of org.alfresco.service.cmr.repository.StoreRef in project SearchServices by Alfresco.
The class AlfrescoSolrTrackerTest, method testTrackers.
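Before the snippets, here is a minimal, self-contained sketch of the StoreRef pattern these tests repeat: a StoreRef names a store by protocol and identifier, and a NodeRef pairs it with a node GUID. It assumes only the Alfresco data-model classes on the classpath; the standalone main method and the use of java.util.UUID in place of the tests' createGUID() helper are illustrative additions, not part of the original test code.

import java.util.UUID;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.repository.StoreRef;

public class StoreRefSketch {
    public static void main(String[] args) {
        // A StoreRef names a store by protocol and identifier, e.g. workspace://SpacesStore.
        StoreRef spacesStore = new StoreRef("workspace", "SpacesStore");
        // A NodeRef pairs the store with a node identifier. The tests below obtain the
        // identifier from a createGUID() helper in their base class; a random UUID is used
        // here purely for illustration.
        NodeRef nodeRef = new NodeRef(spacesStore, UUID.randomUUID().toString());
        // Prints something like workspace://SpacesStore/<uuid>
        System.out.println(nodeRef);
    }
}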
@Test
public void testTrackers() throws Exception {
/*
* Create and index an AclChangeSet.
*/
logger.info("######### Starting tracker test ###########");
AclChangeSet aclChangeSet = getAclChangeSet(1);
Acl acl = getAcl(aclChangeSet);
// Test with long value
Acl acl2 = getAcl(aclChangeSet, Long.MAX_VALUE - 10);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed First Test ##############################");
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 2);
// Next create three nodes to update for the transaction
Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
logger.info("######### error node:" + errorNode.getId());
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
// The errorMetaData will cause an exception.
NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(txn, list(errorNode, folderNode, fileNode), list(errorMetaData, folderMetaData, fileMetaData));
// Check for the TXN state stamp.
logger.info("#################### Started Second Test ##############################");
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed Second Test ##############################");
/*
* Query the index for the content
*/
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Third Test ##############################");
ModifiableSolrParams params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
SolrServletRequest req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"joel\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourth Test ##############################");
// Check for the error doc
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Fifth Test ##############################");
// Mark the folder as needing cascade
Transaction txn1 = getTransaction(0, 1);
// Update the properties on the Node and NodeMetaData to simulate an update to the Node.
folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX, new StringPropertyValue(Long.toString(txn1.getId())));
// Update the txnId
folderNode.setTxnId(txn1.getId());
folderMetaData.setTxnId(txn1.getId());
// Change the ancestor on the file just to see if it's been updated
NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
fileMetaData.setAncestors(ancestors(nodeRef));
// This will add the PROP_CASCADE_TX property to the folder.
logger.info("################### ADDING CASCADE TRANSACTION #################");
indexTransaction(txn1, list(folderNode), list(folderMetaData));
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Sixth Test ##############################");
TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
waitForDocCount(termQuery1, 1, MAX_WAIT_TIME);
params = new ModifiableSolrParams();
params.add("q", QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"");
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventh Test ##############################");
// Check that both documents have been indexed and have content.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
logger.info("#################### Passed Eighth Test ##############################");
// Add a document with isContentIndexed=false
Transaction txnNoContent = getTransaction(0, 1);
Node noContentNode = getNode(txnNoContent, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData noContentMetaData = getNodeMetaData(noContentNode, txnNoContent, acl, "mike", null, false);
noContentMetaData.getProperties().put(ContentModel.PROP_IS_CONTENT_INDEXED, new StringPropertyValue("false"));
noContentMetaData.getProperties().put(ContentModel.PROP_CONTENT, new ContentPropertyValue(Locale.UK, 298L, "UTF-8", "text/json", null));
indexTransaction(txnNoContent, list(noContentNode), list(noContentMetaData));
// This tests that the mime type has been added for this document. It is the only document with text/json in the index.
waitForDocCount(new TermQuery(new Term("content@s__mimetype@{http://www.alfresco.org/model/content/1.0}content", "text/json")), 1, MAX_WAIT_TIME);
// Many of the tests beyond this point rely on a specific count of documents in the index that have content.
// This document should not have had its content indexed, so the following tests will pass.
// If the content had been indexed, the tests following this one would have failed.
// This proves that the ContentModel.PROP_IS_CONTENT_INDEXED property is respected by the tracker.
// Try bulk loading
Transaction txn2 = getTransaction(0, 550);
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
for (int i = 0; i < 550; i++) {
Node n = getNode(txn2, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txn2, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodes.add(n);
nodeMetaDatas.add(nm);
}
logger.info("############################ Bulk Nodes:" + nodes.size());
indexTransaction(txn2, nodes, nodeMetaDatas);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 552, MAX_WAIT_TIME);
logger.info("#################### Passed Ninth Test ##############################");
for (int i = 0; i < 1000; i++) {
Transaction txnX = getTransaction(0, 1);
List<Node> nodesX = new ArrayList<>();
List<NodeMetaData> nodeMetaDatasX = new ArrayList<>();
Node n = getNode(txnX, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txnX, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodesX.add(n);
nodeMetaDatasX.add(nm);
indexTransaction(txnX, nodesX, nodeMetaDatasX);
}
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1552, MAX_WAIT_TIME);
logger.info("#################### Passed Tenth Test ##############################");
// Test the maintenance methods
fileMetaData.setOwner("amy");
reindexTransactionId(txn.getId());
folderMetaData.setOwner("jill");
reindexNodeId(folderNode.getId());
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Eleventh Test ##############################");
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Twelth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"amy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + folderNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"jill\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + folderNode.getId() + "']");
logger.info("#################### Passed Fifteenth Test ##############################");
List<String> readers = aclReaders.getReaders();
// Change the aclReader
readers.set(0, "andy");
indexAclId(acl.getId());
List<String> readers2 = aclReaders2.getReaders();
// Change the aclReader
readers2.set(0, "ice");
reindexAclId(acl2.getId());
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 1, MAX_WAIT_TIME);
// Ice should have replaced jim in acl2.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 0, MAX_WAIT_TIME);
logger.info("#################### Passed Sixteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"andy\"], \"tenants\": [ \"\" ]}");
// FIX ME assertQ(req, "*[count(//doc)=1]","//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventeenth Test ##############################");
// Change the aclReader
readers.set(0, "alan");
// Change the aclReader
readers2.set(0, "paul");
// This should replace "andy" and "ice" with "alan" and "paul"
reindexAclChangeSetId(aclChangeSet.getId());
// Test that "alan" and "paul" are in the index
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 1, MAX_WAIT_TIME);
// Test that "andy" and "ice" are removed
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 0, MAX_WAIT_TIME);
// Test Maintenance acl purge
purgeAclId(acl2.getId());
// Test Maintenance node purge
purgeNodeId(fileNode.getId());
purgeTransactionId(txn2.getId());
// paul should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
// Reflects the purged node and transaction.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1001, MAX_WAIT_TIME);
logger.info("#################### Passed Eighteenth Test ##############################");
purgeAclChangeSetId(aclChangeSet.getId());
// alan should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 0, MAX_WAIT_TIME);
// Fix the error node
errorMetaData.setNodeRef(new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID()));
// Reload the error node.
logger.info("Retry the error node");
retry();
// The error in the index should disappear.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 0, MAX_WAIT_TIME);
// And the error node should be present
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(errorNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Nineteenth Test ##############################");
}
Use of org.alfresco.service.cmr.repository.StoreRef in project SearchServices by Alfresco.
The class LoadCMISData, method addSortableNull.
/**
* @param folder00NodeRef
* @param rootNodeRef
* @param baseFolderNodeRef
* @param baseFolderQName
* @param folder00QName
* @param date1
* @param id
* @param offset
* @throws IOException
*/
private void addSortableNull(NodeRef folder00NodeRef, NodeRef rootNodeRef, NodeRef baseFolderNodeRef, Object baseFolderQName, Object folder00QName, Date date1, String id, int offset) throws IOException {
HashMap<QName, PropertyValue> content00Properties = new HashMap<QName, PropertyValue>();
MLTextPropertyValue desc00 = new MLTextPropertyValue();
desc00.addValue(Locale.ENGLISH, "Test null");
content00Properties.put(ContentModel.PROP_DESCRIPTION, desc00);
content00Properties.put(ContentModel.PROP_TITLE, desc00);
content00Properties.put(ContentModel.PROP_NAME, new StringPropertyValue("Test null"));
content00Properties.put(ContentModel.PROP_CREATED, new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, date1)));
NodeRef content00NodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
QName content00QName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Test null");
ChildAssociationRef content00CAR = new ChildAssociationRef(ContentModel.ASSOC_CONTAINS, folder00NodeRef, content00QName, content00NodeRef, true, 0);
addNode(h.getCore(), dataModel, 1, 200 + offset, 1, extendedContent, new QName[] { ContentModel.ASPECT_OWNABLE, ContentModel.ASPECT_TITLED }, content00Properties, null, "andy", new ChildAssociationRef[] { content00CAR }, new NodeRef[] { baseFolderNodeRef, rootNodeRef, folder00NodeRef }, new String[] { "/" + baseFolderQName.toString() + "/" + folder00QName.toString() + "/" + content00QName.toString() }, content00NodeRef, true);
}
Use of org.alfresco.service.cmr.repository.StoreRef in project SearchServices by Alfresco.
The class LoadCMISData, method addSortableNode.
protected void addSortableNode(NodeRef folder00NodeRef, NodeRef rootNodeRef, NodeRef baseFolderNodeRef, Object baseFolderQName, Object folder00QName, Date date1, int position) throws IOException {
HashMap<QName, PropertyValue> content00Properties = new HashMap<QName, PropertyValue>();
MLTextPropertyValue desc00 = new MLTextPropertyValue();
desc00.addValue(Locale.ENGLISH, "Test " + position);
content00Properties.put(ContentModel.PROP_DESCRIPTION, desc00);
content00Properties.put(ContentModel.PROP_TITLE, desc00);
content00Properties.put(ContentModel.PROP_NAME, new StringPropertyValue("Test " + position));
content00Properties.put(ContentModel.PROP_CREATED, new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, date1)));
StringPropertyValue single = new StringPropertyValue(orderable[position]);
content00Properties.put(singleTextUntokenised, single);
content00Properties.put(singleTextTokenised, single);
content00Properties.put(singleTextBoth, single);
MultiPropertyValue multi = new MultiPropertyValue();
multi.addValue(single);
multi.addValue(new StringPropertyValue(orderable[position + 1]));
content00Properties.put(multipleTextUntokenised, multi);
content00Properties.put(multipleTextTokenised, multi);
content00Properties.put(multipleTextBoth, multi);
content00Properties.put(singleMLTextUntokenised, makeMLText(position));
content00Properties.put(singleMLTextTokenised, makeMLText(position));
content00Properties.put(singleMLTextBoth, makeMLText(position));
content00Properties.put(multipleMLTextUntokenised, makeMLTextMVP(position));
content00Properties.put(multipleMLTextTokenised, makeMLTextMVP(position));
content00Properties.put(multipleMLTextBoth, makeMLTextMVP());
StringPropertyValue one = new StringPropertyValue("" + (1.1 * position));
StringPropertyValue two = new StringPropertyValue("" + (2.2 * position));
MultiPropertyValue multiDec = new MultiPropertyValue();
multiDec.addValue(one);
multiDec.addValue(two);
content00Properties.put(singleFloat, one);
content00Properties.put(multipleFloat, multiDec);
content00Properties.put(singleDouble, one);
content00Properties.put(multipleDouble, multiDec);
one = new StringPropertyValue("" + (1 * position));
two = new StringPropertyValue("" + (2 * position));
MultiPropertyValue multiInt = new MultiPropertyValue();
multiInt.addValue(one);
multiInt.addValue(two);
content00Properties.put(singleInteger, one);
content00Properties.put(multipleInteger, multiInt);
content00Properties.put(singleLong, one);
content00Properties.put(multipleLong, multiInt);
GregorianCalendar cal = new GregorianCalendar();
cal.setTime(date1);
cal.add(Calendar.DAY_OF_MONTH, position);
Date newdate1 = cal.getTime();
cal.add(Calendar.DAY_OF_MONTH, -1);
cal.add(Calendar.DAY_OF_MONTH, 2);
Date date2 = cal.getTime();
StringPropertyValue d1 = new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, newdate1));
StringPropertyValue d2 = new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, date2));
MultiPropertyValue multiDate = new MultiPropertyValue();
multiDate.addValue(d1);
multiDate.addValue(d2);
content00Properties.put(singleDate, d1);
content00Properties.put(multipleDate, multiDate);
content00Properties.put(singleDatetime, d1);
content00Properties.put(multipleDatetime, multiDate);
StringPropertyValue b = new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, position % 2 == 0));
StringPropertyValue bTrue = new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, true));
StringPropertyValue bFalse = new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, false));
MultiPropertyValue multiBool = new MultiPropertyValue();
multiBool.addValue(bTrue);
multiBool.addValue(bFalse);
content00Properties.put(singleBoolean, b);
content00Properties.put(multipleBoolean, multiBool);
NodeRef content00NodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
QName content00QName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Test " + position);
ChildAssociationRef content00CAR = new ChildAssociationRef(ContentModel.ASSOC_CONTAINS, folder00NodeRef, content00QName, content00NodeRef, true, 0);
addNode(h.getCore(), dataModel, 1, 1000 + position, 1, extendedContent, new QName[] { ContentModel.ASPECT_OWNABLE, ContentModel.ASPECT_TITLED }, content00Properties, null, "andy", new ChildAssociationRef[] { content00CAR }, new NodeRef[] { baseFolderNodeRef, rootNodeRef, folder00NodeRef }, new String[] { "/" + baseFolderQName.toString() + "/" + folder00QName.toString() + "/" + content00QName.toString() }, content00NodeRef, true);
}
Use of org.alfresco.service.cmr.repository.StoreRef in project SearchServices by Alfresco.
The class AlfrescoSolrTrackerExceptionTest, method testTrackers.
@Test
public void testTrackers() throws Exception {
/*
* Create and index an AclChangeSet.
*/
logger.info("######### Starting tracker test ###########");
AclChangeSet aclChangeSet = getAclChangeSet(1, 1);
Acl acl = getAcl(aclChangeSet);
Acl acl2 = getAcl(aclChangeSet);
AclReaders aclReaders = getAclReaders(aclChangeSet, acl, list("joel"), list("phil"), null);
AclReaders aclReaders2 = getAclReaders(aclChangeSet, acl2, list("jim"), list("phil"), null);
Thread.sleep(20000);
SOLRAPIQueueClient.setThrowException(false);
logger.info("#################### Stop throwing exceptions ##############################");
indexAclChangeSet(aclChangeSet, list(acl, acl2), list(aclReaders, aclReaders2));
// Check for the ACL state stamp.
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!ACLTX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_ACLTXID, aclChangeSet.getId(), aclChangeSet.getId() + 1, true, false), BooleanClause.Occur.MUST));
BooleanQuery waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed First Test ##############################");
// assert(false);
/*
* Create and index a Transaction
*/
// First create a transaction.
Transaction txn = getTransaction(0, 2, 1);
// Next create three nodes to update for the transaction
Node folderNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node fileNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
Node errorNode = getNode(txn, acl, Node.SolrApiNodeStatus.UPDATED);
logger.info("######### error node:" + errorNode.getId());
// Next create the NodeMetaData for each node. TODO: Add more metadata
NodeMetaData folderMetaData = getNodeMetaData(folderNode, txn, acl, "mike", null, false);
NodeMetaData fileMetaData = getNodeMetaData(fileNode, txn, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
// The errorMetaData will cause an exception.
NodeMetaData errorMetaData = getNodeMetaData(errorNode, txn, acl, "lisa", ancestors(folderMetaData.getNodeRef()), true);
// Index the transaction, nodes, and nodeMetaDatas.
// Note that the content is automatically created by the test framework.
indexTransaction(txn, list(errorNode, folderNode, fileNode), list(errorMetaData, folderMetaData, fileMetaData));
logger.info("#################### Start throwing exceptions ##############################");
SOLRAPIQueueClient.setThrowException(true);
Thread.sleep(10000);
SOLRAPIQueueClient.setThrowException(false);
logger.info("#################### Stop throwing exceptions ##############################");
// Check for the TXN state stamp.
logger.info("#################### Started Second Test ##############################");
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn.getId(), txn.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForQuery = builder.build();
waitForDocCount(waitForQuery, 1, MAX_WAIT_TIME);
logger.info("#################### Passed Second Test ##############################");
/*
* Query the index for the content
*/
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Third Test ##############################");
ModifiableSolrParams params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
SolrServletRequest req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"joel\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourth Test ##############################");
// Check for the error doc
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Fifth Test ##############################");
// Mark the folder as needing cascade
Transaction txn1 = getTransaction(0, 1, 2);
// Update the properties on the Node and NodeMetaData to simulate an update to the Node.
folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX, new StringPropertyValue(Long.toString(txn1.getId())));
// Update the txnId
folderNode.setTxnId(txn1.getId());
folderMetaData.setTxnId(txn1.getId());
// Change the ancestor on the file just to see if it's been updated
NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
fileMetaData.setAncestors(ancestors(nodeRef));
// This will add the PROP_CASCADE_TX property to the folder.
logger.info("################### ADDING CASCADE TRANSACTION #################");
indexTransaction(txn1, list(folderNode), list(folderMetaData));
// Check for the TXN state stamp.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_SOLR4_ID, "TRACKER!STATE!TX")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(LegacyNumericRangeQuery.newLongRange(QueryConstants.FIELD_S_TXID, txn1.getId(), txn1.getId() + 1, true, false), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Sixth Test ##############################");
TermQuery termQuery1 = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
waitForDocCount(termQuery1, 1, MAX_WAIT_TIME);
params = new ModifiableSolrParams();
params.add("q", QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"");
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventh Test ##############################");
// Check that both documents have been indexed and have content.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 2, MAX_WAIT_TIME);
logger.info("#################### Passed Eighth Test ##############################");
// Try bulk loading
Transaction txn2 = getTransaction(0, 550);
List<Node> nodes = new ArrayList<>();
List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
for (int i = 0; i < 550; i++) {
Node n = getNode(txn2, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txn2, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodes.add(n);
nodeMetaDatas.add(nm);
}
logger.info("############################ Bulk Nodes:" + nodes.size());
indexTransaction(txn2, nodes, nodeMetaDatas);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 552, MAX_WAIT_TIME);
logger.info("#################### Passed Ninth Test ##############################");
for (int i = 0; i < 1000; i++) {
Transaction txnX = getTransaction(0, 1);
List<Node> nodesX = new ArrayList<>();
List<NodeMetaData> nodeMetaDatasX = new ArrayList<>();
Node n = getNode(txnX, acl, Node.SolrApiNodeStatus.UPDATED);
NodeMetaData nm = getNodeMetaData(n, txnX, acl, "mike", ancestors(folderMetaData.getNodeRef()), false);
nodesX.add(n);
nodeMetaDatasX.add(nm);
indexTransaction(txnX, nodesX, nodeMetaDatasX);
}
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1552, MAX_WAIT_TIME);
logger.info("#################### Passed Tenth Test ##############################");
// Test the maintenance methods
fileMetaData.setOwner("amy");
reindexTransactionId(txn.getId());
folderMetaData.setOwner("jill");
reindexNodeId(folderNode.getId());
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "amy")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Eleventh Test ##############################");
// Wait for a document that has the new owner and the content populated.
builder = new BooleanQuery.Builder();
builder.add(new BooleanClause(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), BooleanClause.Occur.MUST));
builder.add(new BooleanClause(new TermQuery(new Term(QueryConstants.FIELD_OWNER, "jill")), BooleanClause.Occur.MUST));
waitForDocCount(builder.build(), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Twelth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"amy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Fourteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + folderNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"jill\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + folderNode.getId() + "']");
logger.info("#################### Passed Fifteenth Test ##############################");
List<String> readers = aclReaders.getReaders();
// Change the aclReader
readers.set(0, "andy");
indexAclId(acl.getId());
List<String> readers2 = aclReaders2.getReaders();
// Change the aclReader
readers2.set(0, "ice");
reindexAclId(acl2.getId());
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 1, MAX_WAIT_TIME);
// Ice should have replaced jim in acl2.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "jim")), 0, MAX_WAIT_TIME);
logger.info("#################### Passed Sixteenth Test ##############################");
params = new ModifiableSolrParams();
// Query for an id in the content field. The node id is automatically populated into the content field by the test framework.
params.add("q", "t1:" + fileNode.getId());
params.add("qt", "/afts");
params.add("start", "0");
params.add("rows", "6");
params.add("sort", "id asc");
params.add("fq", "{!afts}AUTHORITY_FILTER_FROM_JSON");
req = areq(params, "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"andy\"], \"tenants\": [ \"\" ]}");
assertQ(req, "*[count(//doc)=1]", "//result/doc[1]/long[@name='DBID'][.='" + fileNode.getId() + "']");
logger.info("#################### Passed Seventeenth Test ##############################");
// Change the aclReader
readers.set(0, "alan");
// Change the aclReader
readers2.set(0, "paul");
// This should replace "andy" and "ice" with "alan" and "paul"
reindexAclChangeSetId(aclChangeSet.getId());
// Test that "alan" and "paul" are in the index
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 1, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 1, MAX_WAIT_TIME);
// Test that "andy" and "ice" are removed
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "andy")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "ice")), 0, MAX_WAIT_TIME);
// Test Maintenance acl purge
purgeAclId(acl2.getId());
// Test Maintenance node purge
purgeNodeId(fileNode.getId());
purgeTransactionId(txn2.getId());
// paul should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "paul")), 0, MAX_WAIT_TIME);
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(fileNode.getId()))), 0, MAX_WAIT_TIME);
// Reflects the purged node and transaction.
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 1001, MAX_WAIT_TIME);
logger.info("#################### Passed Eighteenth Test ##############################");
purgeAclChangeSetId(aclChangeSet.getId());
// alan should be purged
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_READER, "alan")), 0, MAX_WAIT_TIME);
// Fix the error node
errorMetaData.setNodeRef(new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID()));
// Reload the error node.
logger.info("Retry the error node");
retry();
// The error in the index should disappear.
waitForDocCount(new TermQuery(new Term(QueryConstants.FIELD_DOC_TYPE, "ErrorNode")), 0, MAX_WAIT_TIME);
// And the error node should be present
waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", Long.toString(errorNode.getId()))), 1, MAX_WAIT_TIME);
logger.info("#################### Passed Nineteenth Test ##############################");
// assert(false);
}
Use of org.alfresco.service.cmr.repository.StoreRef in project SearchServices by Alfresco.
The class AuthDataLoad, method setup.
@BeforeClass
public static void setup() throws Exception {
// Start the test harness
initAlfrescoCore("schema.xml");
// Root
NodeRef rootNodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
addStoreRoot(h.getCore(), dataModel, rootNodeRef, 1, 1, 1, 1);
// rsp.add("StoreRootNode", 1);
// Base
HashMap<QName, PropertyValue> baseFolderProperties = new HashMap<QName, PropertyValue>();
baseFolderProperties.put(ContentModel.PROP_NAME, new StringPropertyValue("Base Folder"));
NodeRef baseFolderNodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
QName baseFolderQName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "baseFolder");
ChildAssociationRef n01CAR = new ChildAssociationRef(ContentModel.ASSOC_CHILDREN, rootNodeRef, baseFolderQName, baseFolderNodeRef, true, 0);
addNode(h.getCore(), dataModel, 1, 2, 1, ContentModel.TYPE_FOLDER, null, baseFolderProperties, null, "andy", new ChildAssociationRef[] { n01CAR }, new NodeRef[] { rootNodeRef }, new String[] { "/" + baseFolderQName.toString() }, baseFolderNodeRef, true);
// Folders
HashMap<QName, PropertyValue> folder00Properties = new HashMap<QName, PropertyValue>();
folder00Properties.put(ContentModel.PROP_NAME, new StringPropertyValue("Folder 0"));
NodeRef folder00NodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
QName folder00QName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Folder 0");
ChildAssociationRef folder00CAR = new ChildAssociationRef(ContentModel.ASSOC_CONTAINS, baseFolderNodeRef, folder00QName, folder00NodeRef, true, 0);
addNode(h.getCore(), dataModel, 1, 3, 1, ContentModel.TYPE_FOLDER, null, folder00Properties, null, "andy", new ChildAssociationRef[] { folder00CAR }, new NodeRef[] { baseFolderNodeRef, rootNodeRef }, new String[] { "/" + baseFolderQName.toString() + "/" + folder00QName.toString() }, folder00NodeRef, true);
for (long i = 0; i < count; i++) {
addAcl(h.getCore(), dataModel, 10 + (int) i, 10 + (int) i, (int) (i % maxReader), (int) maxReader);
HashMap<QName, PropertyValue> content00Properties = new HashMap<QName, PropertyValue>();
MLTextPropertyValue desc00 = new MLTextPropertyValue();
desc00.addValue(Locale.ENGLISH, "Doc " + i);
desc00.addValue(Locale.US, "Doc " + i);
content00Properties.put(ContentModel.PROP_DESCRIPTION, desc00);
content00Properties.put(ContentModel.PROP_TITLE, desc00);
content00Properties.put(ContentModel.PROP_CONTENT, new ContentPropertyValue(Locale.UK, 0L, "UTF-8", "text/plain", null));
content00Properties.put(ContentModel.PROP_NAME, new StringPropertyValue("Doc " + i));
content00Properties.put(ContentModel.PROP_CREATOR, new StringPropertyValue("Test"));
content00Properties.put(ContentModel.PROP_MODIFIER, new StringPropertyValue("Test"));
content00Properties.put(ContentModel.PROP_VERSION_LABEL, new StringPropertyValue("1.0"));
content00Properties.put(ContentModel.PROP_OWNER, new StringPropertyValue("Test"));
Date date00 = new Date();
content00Properties.put(ContentModel.PROP_CREATED, new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, date00)));
content00Properties.put(ContentModel.PROP_MODIFIED, new StringPropertyValue(DefaultTypeConverter.INSTANCE.convert(String.class, date00)));
HashMap<QName, String> content00Content = new HashMap<QName, String>();
content00Content.put(ContentModel.PROP_CONTENT, "Test doc number " + i);
NodeRef content00NodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
QName content00QName = QName.createQName(NamespaceService.CONTENT_MODEL_1_0_URI, "Doc-" + i);
ChildAssociationRef content00CAR = new ChildAssociationRef(ContentModel.ASSOC_CONTAINS, folder00NodeRef, content00QName, content00NodeRef, true, 0);
addNode(h.getCore(), dataModel, 1, 10 + (int) i, 10 + (int) i, ContentModel.TYPE_CONTENT, new QName[] { ContentModel.ASPECT_OWNABLE, ContentModel.ASPECT_TITLED }, content00Properties, content00Content, "andy", new ChildAssociationRef[] { content00CAR }, new NodeRef[] { baseFolderNodeRef, rootNodeRef, folder00NodeRef }, new String[] { "/" + baseFolderQName.toString() + "/" + folder00QName.toString() + "/" + content00QName.toString() }, content00NodeRef, false);
}
h.getCore().getUpdateHandler().commit(new CommitUpdateCommand(req(), false));
}