Use of org.alfresco.httpclient.AuthenticationException in project SearchServices by Alfresco.
In class SolrInformationServer, method cascadeNodes:
public void cascadeNodes(List<NodeMetaData> nodeMetaDatas, boolean overwrite) throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());
        for (NodeMetaData nodeMetaData : nodeMetaDatas) {
            if (mayHaveChildren(nodeMetaData)) {
                cascadeUpdateV2(nodeMetaData, overwrite, request, processor);
            }
        }
    } catch (Exception e) {
        for (NodeMetaData node : nodeMetaDatas) {
            // TODO Add a cascadeNode method
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}
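Note that the catch block above currently swallows the exception and only leaves a TODO for a per-node fallback. Below is a minimal sketch of what such a fallback could look like; the cascadeNode method does not exist in SearchServices, and its name, signature, and behaviour are assumptions made purely for illustration. It mirrors the bulk path one node at a time, with its own request/processor pair, so a single failing node cannot abort the whole batch.

// Hypothetical sketch only: a per-node fallback for the TODO in cascadeNodes.
// This method is not part of SearchServices; it re-runs the cascade for a single
// node so that one bad node cannot abort the batch.
private void cascadeNode(NodeMetaData nodeMetaData, boolean overwrite) throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());
        if (mayHaveChildren(nodeMetaData)) {
            cascadeUpdateV2(nodeMetaData, overwrite, request, processor);
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}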
Use of org.alfresco.httpclient.AuthenticationException in project SearchServices by Alfresco.
In class SolrInformationServer, method indexNodes:
@Override
public void indexNodes(List<Node> nodes, boolean overwrite, boolean cascade) throws IOException, AuthenticationException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = getLocalSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, new SolrQueryResponse());
        Map<Long, Node> nodeIdsToNodes = new HashMap<>();
        EnumMap<SolrApiNodeStatus, List<Long>> nodeStatusToNodeIds = new EnumMap<SolrApiNodeStatus, List<Long>>(SolrApiNodeStatus.class);
        categorizeNodes(nodes, nodeIdsToNodes, nodeStatusToNodeIds);
        List<Long> deletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.DELETED));
        List<Long> shardDeletedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_DELETED));
        List<Long> shardUpdatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.NON_SHARD_UPDATED));
        List<Long> unknownNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UNKNOWN));
        List<Long> updatedNodeIds = mapNullToEmptyList(nodeStatusToNodeIds.get(SolrApiNodeStatus.UPDATED));
        if (!deletedNodeIds.isEmpty() || !shardDeletedNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty()) {
            // fix up any secondary paths
            List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
            // For all deleted nodes, fake the node metadata
            for (Long deletedNodeId : deletedNodeIds) {
                Node node = nodeIdsToNodes.get(deletedNodeId);
                NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
                nodeMetaDatas.add(nodeMetaData);
            }
            if (!unknownNodeIds.isEmpty()) {
                NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
                nmdp.setNodeIds(unknownNodeIds);
                nodeMetaDatas.addAll(repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE));
            }
            for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                if (nodeMetaData.getTxnId() > node.getTxnId()) {
                    // it will be indexed later
                    continue;
                }
                if (nodeMetaData != null) {
                    try {
                        // Lock the node to ensure that no other trackers work with this node until this code completes.
                        if (!spinLock(nodeMetaData.getId(), 120000)) {
                            // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                            throw new Exception("Unable to acquire lock on nodeId:" + nodeMetaData.getId());
                        }
                        solrContentStore.removeDocFromContentStore(nodeMetaData);
                    } finally {
                        unlock(nodeMetaData.getId());
                    }
                }
            }
            if (log.isDebugEnabled()) {
                log.debug(".. deleting");
            }
            DeleteUpdateCommand delDocCmd = new DeleteUpdateCommand(request);
            String query = this.cloud.getQuery(FIELD_DBID, OR, deletedNodeIds, shardDeletedNodeIds, shardUpdatedNodeIds, unknownNodeIds);
            delDocCmd.setQuery(query);
            processor.processDelete(delDocCmd);
        }
        if (!updatedNodeIds.isEmpty() || !unknownNodeIds.isEmpty() || !shardUpdatedNodeIds.isEmpty()) {
            log.info(".. updating");
            NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
            List<Long> nodeIds = new LinkedList<>();
            nodeIds.addAll(updatedNodeIds);
            nodeIds.addAll(unknownNodeIds);
            nodeIds.addAll(shardUpdatedNodeIds);
            nmdp.setNodeIds(nodeIds);
            // Fetches bulk metadata
            List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
            NEXT_NODE: for (NodeMetaData nodeMetaData : nodeMetaDatas) {
                // System.out.println("####################### NodeMetaData:"+ nodeMetaData.getId());
                long start = System.nanoTime();
                Node node = nodeIdsToNodes.get(nodeMetaData.getId());
                long nodeId = node.getId();
                try {
                    // Lock the node to ensure that no other trackers work with this node until this code completes.
                    if (!spinLock(nodeId, 120000)) {
                        // We haven't acquired the lock in over 2 minutes. This really shouldn't happen unless something has gone wrong.
                        throw new Exception("Unable to acquire lock on nodeId:" + nodeId);
                    }
                    if (nodeMetaData.getTxnId() > node.getTxnId()) {
                        // it will be indexed later
                        continue;
                    }
                    if (nodeIdsToNodes.get(nodeMetaData.getId()).getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED) {
                        if (nodeMetaData.getProperties().get(ContentModel.PROP_CASCADE_TX) != null) {
                            indexNonShardCascade(nodeMetaData);
                        }
                        continue;
                    }
                    AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                    addDocCmd.overwrite = overwrite;
                    // check index control
                    Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
                    StringPropertyValue pValue = (StringPropertyValue) properties.get(ContentModel.PROP_IS_INDEXED);
                    if (pValue != null) {
                        Boolean isIndexed = Boolean.valueOf(pValue.getValue());
                        if (!isIndexed.booleanValue()) {
                            if (log.isDebugEnabled()) {
                                log.debug(".. clearing unindexed");
                            }
                            deleteNode(processor, request, node);
                            SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_UNINDEXED_NODE);
                            addDocCmd.solrDoc = doc;
                            if (recordUnindexedNodes) {
                                solrContentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
                                processor.processAdd(addDocCmd);
                            }
                            long end = System.nanoTime();
                            this.trackerStats.addNodeTime(end - start);
                            continue NEXT_NODE;
                        }
                    }
                    // Make sure any unindexed or error doc is removed.
                    if (log.isDebugEnabled()) {
                        log.debug(".. deleting node " + node.getId());
                    }
                    deleteNode(processor, request, node);
                    SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
                    addToNewDocAndCache(nodeMetaData, doc);
                    addDocCmd.solrDoc = doc;
                    // System.out.println("###################### indexing doc:"+doc.toString());
                    processor.processAdd(addDocCmd);
                    long end = System.nanoTime();
                    this.trackerStats.addNodeTime(end - start);
                } finally {
                    // release the lock on the node so other trackers can access the node.
                    unlock(nodeId);
                }
            }
            // Ends iteration over nodeMetadatas
        }
        // Ends checking for the existence of updated or unknown node ids
    } catch (Exception e) {
        log.error("SolrInformationServer problem", e);
        // Bulk version failed, so do one at a time.
        for (Node node : nodes) {
            this.indexNode(node, true);
        }
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}
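indexNodes relies on a small mapNullToEmptyList helper to guard against EnumMap buckets that categorizeNodes never populated. A minimal sketch of such a helper is shown below, assuming it only substitutes an empty list for a missing bucket; the actual implementation in SolrInformationServer may differ in detail.

// Minimal sketch, assuming the helper merely replaces a missing bucket with an
// empty list so callers can iterate and call isEmpty() without null checks.
private static <T> List<T> mapNullToEmptyList(List<T> list) {
    return list == null ? Collections.<T> emptyList() : list;
}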
Use of org.alfresco.httpclient.AuthenticationException in project SearchServices by Alfresco.
In class AclTracker, method getFullNodesForDbTransaction:
public List<Node> getFullNodesForDbTransaction(Long txid) {
    try {
        GetNodesParameters gnp = new GetNodesParameters();
        ArrayList<Long> txs = new ArrayList<Long>();
        txs.add(txid);
        gnp.setTransactionIds(txs);
        gnp.setStoreProtocol(storeRef.getProtocol());
        gnp.setStoreIdentifier(storeRef.getIdentifier());
        return client.getNodes(gnp, Integer.MAX_VALUE);
    } catch (IOException e) {
        throw new AlfrescoRuntimeException("Failed to get nodes", e);
    } catch (JSONException e) {
        throw new AlfrescoRuntimeException("Failed to get nodes", e);
    } catch (AuthenticationException e) {
        throw new AlfrescoRuntimeException("Failed to get nodes", e);
    }
}
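All three catch clauses wrap the exception in the same AlfrescoRuntimeException, so the method could equally be written with a Java 7 multi-catch, as checkAcl further down already does. The variant below is an illustrative rewrite with identical behaviour, not the code that ships in SearchServices.

// Illustrative alternative only: same behaviour, expressed with multi-catch.
public List<Node> getFullNodesForDbTransaction(Long txid) {
    try {
        GetNodesParameters gnp = new GetNodesParameters();
        ArrayList<Long> txs = new ArrayList<Long>();
        txs.add(txid);
        gnp.setTransactionIds(txs);
        gnp.setStoreProtocol(storeRef.getProtocol());
        gnp.setStoreIdentifier(storeRef.getIdentifier());
        return client.getNodes(gnp, Integer.MAX_VALUE);
    } catch (IOException | JSONException | AuthenticationException e) {
        throw new AlfrescoRuntimeException("Failed to get nodes", e);
    }
}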
Use of org.alfresco.httpclient.AuthenticationException in project SearchServices by Alfresco.
In class AclTracker, method getAclsForDbAclTransaction:
/**
 * @param acltxid the id of the ACL change set (ACL transaction) to inspect
 * @return the ids of all ACLs contained in that change set
 */
public List<Long> getAclsForDbAclTransaction(Long acltxid) {
    try {
        ArrayList<Long> answer = new ArrayList<Long>();
        // Fetch the single change set with id acltxid (the request is bounded to
        // [acltxid, acltxid + 1) with at most one result), then list every ACL it contains.
        AclChangeSets changeSet = client.getAclChangeSets(null, acltxid, null, acltxid + 1, 1);
        List<Acl> acls = client.getAcls(changeSet.getAclChangeSets(), null, Integer.MAX_VALUE);
        for (Acl acl : acls) {
            answer.add(acl.getId());
        }
        return answer;
    } catch (IOException e) {
        throw new AlfrescoRuntimeException("Failed to get acls", e);
    } catch (JSONException e) {
        throw new AlfrescoRuntimeException("Failed to get acls", e);
    } catch (AuthenticationException e) {
        throw new AlfrescoRuntimeException("Failed to get acls", e);
    }
}
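A hedged usage sketch follows; the aclTracker variable, the acltxid value, and the log field are illustrative assumptions, not taken from the project.

// Hypothetical usage sketch: list the ACLs touched by one ACL transaction.
Long acltxid = 42L;
List<Long> aclIds = aclTracker.getAclsForDbAclTransaction(acltxid);
log.info("ACL transaction " + acltxid + " contains " + aclIds.size() + " ACL(s): " + aclIds);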
Use of org.alfresco.httpclient.AuthenticationException in project SearchServices by Alfresco.
In class AclTracker, method checkAcl:
public AclReport checkAcl(Long aclid) {
    AclReport aclReport = new AclReport();
    aclReport.setAclId(aclid);
    // In DB: the ACL is considered present if exactly one readers entry comes back for it.
    try {
        List<AclReaders> readers = client.getAclReaders(Collections.singletonList(new Acl(0, aclid)));
        aclReport.setExistsInDb(readers.size() == 1);
    } catch (IOException | JSONException | AuthenticationException e) {
        aclReport.setExistsInDb(false);
    }
    // In Index
    return this.infoSrv.checkAclInIndex(aclid, aclReport);
}
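A similarly hedged usage sketch for checkAcl; the aclTracker instance and the aclid value are illustrative assumptions, and the returned report is simply handed on rather than inspected here, since AclReport's getters are not shown above.

// Hypothetical usage sketch: build a combined DB/index report for a single ACL.
Long aclid = 123L;
AclReport report = aclTracker.checkAcl(aclid);
// The report now carries both the database check (setExistsInDb above) and the
// index-side details filled in by infoSrv.checkAclInIndex.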