Use of org.exist.dom.persistent.NodeSet in project exist by eXist-db.
The class XMLDBDefragment, method eval:
/**
 * Triggers a defragmentation check on the documents containing the nodes
 * in the first argument. An optional second argument supplies the
 * fragmentation split count; otherwise the value configured in conf.xml
 * (or -1 when absent) is used.
 *
 * @param args            args[0] holds the nodes whose documents are checked;
 *                        args[1] (optional) holds the integer split count
 * @param contextSequence the dynamic context sequence (unused here)
 * @return the empty sequence
 * @throws XPathException if defragmentation fails with a lock or database error
 */
public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException {
    // Resolve the target documents from the supplied node set.
    final NodeSet nodeSet = args[0].toNodeSet();
    final DocumentSet targetDocs = nodeSet.getDocumentSet();
    try {
        if (args.length <= 1) {
            // Use conf.xml configured value or -1 if not existent
            Modification.checkFragmentation(context, targetDocs);
        } else {
            // Use supplied parameter
            final int splitCount = ((IntegerValue) args[1].itemAt(0)).getInt();
            Modification.checkFragmentation(context, targetDocs, splitCount);
        }
    } catch (final LockException | EXistException e) {
        logger.error("An error occurred while defragmenting documents: {}", e.getMessage());
        throw new XPathException(this, "An error occurred while defragmenting documents: " + e.getMessage(), e);
    }
    return Sequence.EMPTY_SEQUENCE;
}
Use of org.exist.dom.persistent.NodeSet in project exist by eXist-db.
The class FunRemove, method eval:
/**
 * Implements fn:remove(): returns the input sequence with the item at the
 * 1-based position given by the second argument removed. An out-of-range
 * position returns the input unchanged; an empty input yields the empty
 * sequence.
 *
 * @param contextSequence the context sequence for argument evaluation
 * @param contextItem     the context item for argument evaluation
 * @return the input sequence minus the selected item
 * @throws XPathException if argument evaluation fails
 */
public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException {
    final Profiler profiler = context.getProfiler();
    if (profiler.isEnabled()) {
        profiler.start(this);
        profiler.message(this, Profiler.DEPENDENCIES, "DEPENDENCIES", Dependency.getDependenciesName(this.getDependencies()));
        if (contextSequence != null) {
            profiler.message(this, Profiler.START_SEQUENCES, "CONTEXT SEQUENCE", contextSequence);
        }
        if (contextItem != null) {
            profiler.message(this, Profiler.START_SEQUENCES, "CONTEXT ITEM", contextItem.toSequence());
        }
    }
    final Sequence input = getArgument(0).eval(contextSequence, contextItem);
    Sequence result;
    if (input.isEmpty()) {
        result = Sequence.EMPTY_SEQUENCE;
    } else {
        // TODO : explain this Double conversion -pb
        final int position = ((DoubleValue) getArgument(1).eval(contextSequence, contextItem).convertTo(Type.DOUBLE)).getInt();
        if (position >= 1 && position <= input.getItemCount()) {
            // Convert to a 0-based index for removal.
            final int removeIdx = position - 1;
            if (input instanceof NodeSet) {
                // Node sets support set arithmetic: copy, then subtract the target node.
                final NodeSet copy = new ExtArrayNodeSet();
                copy.addAll((NodeSet) input);
                result = copy.except((NodeSet) input.itemAt(removeIdx));
            } else {
                // Atomic values: rebuild the sequence, skipping the removed index.
                final ValueSequence remaining = new ValueSequence();
                for (int i = 0; i < input.getItemCount(); i++) {
                    if (i != removeIdx) {
                        remaining.add(input.itemAt(i));
                    }
                }
                result = remaining;
            }
        } else {
            // Position out of range: fn:remove returns the input unchanged.
            result = input;
        }
    }
    if (profiler.isEnabled()) {
        profiler.end(this, "", result);
    }
    return result;
}
Use of org.exist.dom.persistent.NodeSet in project exist by eXist-db.
The class FunResolveQName, method eval:
/**
 * Implements fn:resolve-QName(): resolves the lexical QName in args[0]
 * against the in-scope namespaces of the element node in args[1].
 *
 * Fix: the original pushed the in-scope namespace context but only popped
 * it on the success path, leaving the namespace stack unbalanced whenever
 * an error (FOCA0002/FONS0004) was raised. The pop now lives in a finally
 * block so push/pop are always paired.
 *
 * @param args            args[0] the lexical QName string; args[1] the element
 *                        providing the in-scope namespace bindings
 * @param contextSequence the dynamic context sequence (profiling only)
 * @return a single xs:QName value, or the empty sequence if args[0] is empty
 * @throws XPathException FOCA0002 for an invalid lexical QName,
 *                        FONS0004 when a non-empty prefix has no binding
 */
public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException {
    if (context.getProfiler().isEnabled()) {
        context.getProfiler().start(this);
        context.getProfiler().message(this, Profiler.DEPENDENCIES, "DEPENDENCIES", Dependency.getDependenciesName(this.getDependencies()));
        if (contextSequence != null) {
            context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT SEQUENCE", contextSequence);
        }
    }
    final Sequence qnameSeq = args[0];
    if (qnameSeq.isEmpty()) {
        return EmptySequence.EMPTY_SEQUENCE;
    }
    context.pushInScopeNamespaces();
    try {
        final String qnameString = args[0].getStringValue();
        if (QName.isQName(qnameString) != VALID.val) {
            throw new XPathException(this, ErrorCodes.FOCA0002, "Invalid lexical value. '" + qnameString + "' is not a QName.", args[0]);
        }
        try {
            String prefix = QName.extractPrefix(qnameString);
            if (prefix == null) {
                prefix = "";
            }
            String uri = null;
            final NodeValue node = (NodeValue) args[1].itemAt(0);
            if (node.getImplementationType() == NodeValue.PERSISTENT_NODE) {
                // Persistent DOM: search the element and its ancestors for a binding.
                NodeProxy proxy = (NodeProxy) node;
                final NodeSet ancestors = proxy.getAncestors(contextId, true);
                for (NodeProxy ancestor : ancestors) {
                    proxy = ancestor;
                    final ElementImpl e = (ElementImpl) proxy.getNode();
                    uri = findNamespaceURI(e, prefix);
                    if (uri != null) {
                        break;
                    }
                }
            } else {
                // In-memory DOM: climb parent links while still on element nodes.
                NodeImpl next = (NodeImpl) node;
                do {
                    uri = findNamespaceURI((org.exist.dom.memtree.ElementImpl) next, prefix);
                    if (uri != null) {
                        break;
                    } else {
                        next = (NodeImpl) next.getParentNode();
                    }
                } while (next != null && next.getNodeType() == Node.ELEMENT_NODE);
            }
            // prefix is never null here (coalesced to "" above), so only the
            // non-empty-prefix-without-binding case is an error.
            if (uri == null && !prefix.isEmpty()) {
                throw new XPathException(this, ErrorCodes.FONS0004, "No namespace found for prefix. No binding for prefix '" + prefix + "' was found.", args[0]);
            }
            final String localPart = QName.extractLocalName(qnameString);
            final QName qn = new QName(localPart, uri, prefix);
            final QNameValue result = new QNameValue(context, qn);
            if (context.getProfiler().isEnabled()) {
                context.getProfiler().end(this, "", result);
            }
            return result;
        } catch (final QName.IllegalQNameException e) {
            throw new XPathException(this, ErrorCodes.FOCA0002, "Invalid lexical value. '" + qnameString + "' is not a QName.", args[0]);
        }
    } finally {
        // Always rebalance the namespace stack, including on error paths.
        context.popInScopeNamespaces();
    }
}
Use of org.exist.dom.persistent.NodeSet in project exist by eXist-db.
The class Modification, method selectAndLock:
/**
 * Acquire a lock on all documents processed by this modification. We have
 * to avoid that node positions change during the operation.
 * feature trigger_update :
 * At the same time we leverage on the fact that it's called before
 * database modification to call the eventual triggers.
 *
 * @param transaction the database transaction.
 *
 * @return The selected document nodes.
 *
 * @throws LockException if a lock error occurs
 * @throws PermissionDeniedException if the caller has insufficient privileges
 * @throws EXistException if the database raises an error
 * @throws XPathException if the XPath raises an error
 * @throws TriggerException if a trigger raises an error
 */
protected final StoredNode[] selectAndLock(Txn transaction) throws LockException, PermissionDeniedException, EXistException, XPathException, TriggerException {
    // The global update lock serializes this select-and-lock phase across
    // concurrent updates; it is held only while the document locks are taken.
    final java.util.concurrent.locks.Lock globalLock = broker.getBrokerPool().getGlobalUpdateLock();
    globalLock.lock();
    try {
        // Evaluate the selection against the modification's document set.
        final NodeList nl = select(docs);
        final DocumentSet lockedDocuments = ((NodeSet) nl).getDocumentSet();
        // acquire a lock on all documents
        // we have to avoid that node positions change
        // during the modification
        lockedDocumentsLocks = lockedDocuments.lock(broker, true);
        final StoredNode[] ql = new StoredNode[nl.getLength()];
        for (int i = 0; i < ql.length; i++) {
            ql[i] = (StoredNode) nl.item(i);
            final DocumentImpl doc = ql[i].getOwnerDocument();
            // call the eventual triggers
            // TODO -jmv separate loop on docs and not on nodes
            // prepare Trigger
            prepareTrigger(transaction, doc);
        }
        return ql;
    } finally {
        // Release the global lock; per-document locks in lockedDocumentsLocks
        // remain held and are released elsewhere after the modification.
        globalLock.unlock();
    }
}
Use of org.exist.dom.persistent.NodeSet in project exist by eXist-db.
The class LuceneMatchListener, method reset:
/**
 * Re-initializes this match listener for a new node. Resolves the Lucene
 * configuration of the node's collection, collects the query terms, and —
 * when matches were recorded on an ancestor of the node — scans those
 * ancestors so match offsets can be translated to the current node.
 *
 * @param broker the broker used for index and collection access
 * @param proxy  the node whose matches are to be highlighted
 */
protected void reset(final DBBroker broker, final NodeProxy proxy) {
    this.broker = broker;
    this.match = proxy.getMatches();
    setNextInChain(null);
    // Pick the collection's Lucene config, falling back to the default.
    final IndexSpec indexConf = proxy.getOwnerDocument().getCollection().getIndexConfiguration(broker);
    config = indexConf == null
            ? LuceneConfig.DEFAULT_CONFIG
            : (LuceneConfig) indexConf.getCustomIndexSpec(LuceneIndex.ID);
    getTerms();
    nodesWithMatch = new TreeMap<>();
    /* Check if an index is defined on an ancestor of the current node.
     * If yes, scan the ancestor to get the offset of the first character
     * in the current node. For example, if the indexed node is <a>abc<b>de</b></a>
     * and we query for //a[text:ngram-contains(., 'de')]/b, proxy will be a <b> node, but
     * the offsets of the matches are relative to the start of <a>.
     */
    NodeSet matchAncestors = null;
    for (Match m = this.match; m != null; m = m.getNextMatch()) {
        if (proxy.getNodeId().isDescendantOf(m.getNodeId())) {
            if (matchAncestors == null) {
                matchAncestors = new NewArrayNodeSet();
            }
            matchAncestors.add(new NodeProxy(proxy.getOwnerDocument(), m.getNodeId()));
        }
    }
    if (matchAncestors != null && !matchAncestors.isEmpty()) {
        for (final NodeProxy ancestor : matchAncestors) {
            scanMatches(ancestor);
        }
    }
}
Aggregations