Use of org.exist.dom.persistent.DocumentSet in the eXist-db project: class XMLDBDefragment, method eval.
/**
 * Defragments the documents containing the nodes passed in the first argument.
 *
 * @param args            args[0]: the node set whose owning documents are checked;
 *                        args[1] (optional): an explicit fragmentation split count
 * @param contextSequence the context sequence (unused)
 * @return the empty sequence
 * @throws XPathException if a lock or database error occurs during defragmentation
 */
public Sequence eval(Sequence[] args, Sequence contextSequence) throws XPathException {
    // Resolve the documents owning the supplied nodes
    final NodeSet nodeSet = args[0].toNodeSet();
    final DocumentSet documents = nodeSet.getDocumentSet();
    try {
        if (args.length > 1) {
            // An explicit split count was supplied by the caller
            final int fragmentationLimit = ((IntegerValue) args[1].itemAt(0)).getInt();
            Modification.checkFragmentation(context, documents, fragmentationLimit);
        } else {
            // Fall back to the conf.xml configured value (or -1 when absent)
            Modification.checkFragmentation(context, documents);
        }
        return Sequence.EMPTY_SEQUENCE;
    } catch (final LockException | EXistException e) {
        logger.error("An error occurred while defragmenting documents: {}", e.getMessage());
        throw new XPathException(this, "An error occurred while defragmenting documents: " + e.getMessage(), e);
    }
}
Use of org.exist.dom.persistent.DocumentSet in the eXist-db project: class TestDataGenerator, method generate.
/**
 * Generates test data files by repeatedly executing the given XQuery against the
 * collection, serializing each result sequence to a fresh temporary XML file.
 *
 * @param broker        the broker used to compile, execute, and serialize
 * @param collection    the collection whose documents form the statically known set
 * @param xqueryContent the XQuery body (appended to the standard import prolog)
 * @return the temporary files written, one per generation round
 * @throws SAXException wrapping any query, permission, lock, or I/O failure
 */
public Path[] generate(final DBBroker broker, final Collection collection, final String xqueryContent) throws SAXException {
    try {
        final DocumentSet docs = collection.allDocs(broker, new DefaultDocumentSet(), true);
        final XQuery service = broker.getBrokerPool().getXQueryService();
        final XQueryContext context = new XQueryContext(broker.getBrokerPool());
        // Placeholder bindings; rebound per iteration below
        context.declareVariable("filename", "");
        context.declareVariable("count", "0");
        context.setStaticallyKnownDocuments(docs);
        final String query = IMPORT + xqueryContent;
        final CompiledXQuery compiled = service.compile(context, query);
        for (int i = 0; i < count; i++) {
            generatedFiles[i] = Files.createTempFile(prefix, ".xml");
            context.declareVariable("filename", generatedFiles[i].getFileName().toString());
            // Integer.valueOf instead of the deprecated new Integer(int) constructor
            context.declareVariable("count", Integer.valueOf(i));
            final Sequence results = service.execute(broker, compiled, Sequence.EMPTY_SEQUENCE);
            final Serializer serializer = broker.borrowSerializer();
            try (final Writer out = Files.newBufferedWriter(generatedFiles[i], StandardCharsets.UTF_8)) {
                final SAXSerializer sax = new SAXSerializer(out, outputProps);
                serializer.setSAXHandlers(sax, sax);
                for (final SequenceIterator iter = results.iterate(); iter.hasNext(); ) {
                    final Item item = iter.nextItem();
                    // Only node results can be serialized; skip atomic values
                    if (!Type.subTypeOf(item.getType(), Type.NODE)) {
                        continue;
                    }
                    serializer.toSAX((NodeValue) item);
                }
            } finally {
                // Always return the serializer to the broker's pool
                broker.returnSerializer(serializer);
            }
        }
    } catch (final XPathException | PermissionDeniedException | LockException | IOException e) {
        LOG.error(e.getMessage(), e);
        throw new SAXException(e.getMessage(), e);
    }
    return generatedFiles;
}
Use of org.exist.dom.persistent.DocumentSet in the eXist-db project: class Modification, method selectAndLock.
/**
 * Acquire a lock on all documents processed by this modification. We have
 * to avoid that node positions change during the operation.
 * feature trigger_update :
 * At the same time we leverage on the fact that it's called before
 * database modification to call the eventual triggers.
 *
 * @param transaction the database transaction.
 *
 * @return The selected document nodes.
 *
 * @throws LockException if a lock error occurs
 * @throws PermissionDeniedException if the caller has insufficient privileges
 * @throws EXistException if the database raises an error
 * @throws XPathException if the XPath raises an error
 * @throws TriggerException if a trigger raises an error
 */
protected final StoredNode[] selectAndLock(Txn transaction) throws LockException, PermissionDeniedException, EXistException, XPathException, TriggerException {
    // Serialize concurrent updates: the global update lock is held only while
    // the per-document locks are being acquired, then released in the finally.
    final java.util.concurrent.locks.Lock globalLock = broker.getBrokerPool().getGlobalUpdateLock();
    globalLock.lock();
    try {
        // Evaluate the selection against this modification's document set
        final NodeList nl = select(docs);
        final DocumentSet lockedDocuments = ((NodeSet) nl).getDocumentSet();
        // acquire a lock on all documents
        // we have to avoid that node positions change
        // during the modification
        // NOTE(review): the returned lock handles are stored on the instance
        // (lockedDocumentsLocks) — presumably released by a later unlock step.
        lockedDocumentsLocks = lockedDocuments.lock(broker, true);
        final StoredNode[] ql = new StoredNode[nl.getLength()];
        for (int i = 0; i < ql.length; i++) {
            ql[i] = (StoredNode) nl.item(i);
            final DocumentImpl doc = ql[i].getOwnerDocument();
            // call the eventual triggers
            // TODO -jmv separate loop on docs and not on nodes
            // prepare Trigger
            prepareTrigger(transaction, doc);
        }
        return ql;
    } finally {
        globalLock.unlock();
    }
}
Use of org.exist.dom.persistent.DocumentSet in the eXist-db project: class HistoryTriggerTest, method checkHistoryOfOriginal.
/**
 * Asserts that the history collection for the given document contains exactly one
 * revision and that its content matches the original document content.
 */
private void checkHistoryOfOriginal(final BrokerPool brokerPool, final XmldbURI originalDocName, final String orginalDocContent) throws EXistException, PermissionDeniedException, LockException {
    // Act as the system subject so the history collection is readable
    try (final DBBroker broker = brokerPool.get(Optional.of(brokerPool.getSecurityManager().getSystemSubject()));
            final Txn transaction = brokerPool.getTransactionManager().beginTransaction()) {
        final XmldbURI historyCollectionUri = HistoryTrigger.DEFAULT_ROOT_PATH.append(TEST_COLLECTION_URI).append(originalDocName);
        try (final Collection historyCollection = broker.openCollection(historyCollectionUri, Lock.LockMode.READ_LOCK)) {
            assertNotNull(historyCollection);
            // Exactly one historical revision is expected
            final DocumentSet revisions = historyCollection.getDocuments(broker, new DefaultDocumentSet());
            assertEquals(1, revisions.getDocumentCount());
            final Iterator<DocumentImpl> revisionIterator = revisions.getDocumentIterator();
            assertTrue(revisionIterator.hasNext());
            final DocumentImpl revision = revisionIterator.next();
            // The stored revision must be XML-equivalent to the original content
            final Diff diff = DiffBuilder.compare(Input.from(orginalDocContent)).withTest(Input.from(revision)).build();
            assertFalse(diff.toString(), diff.hasDifferences());
            assertFalse(revisionIterator.hasNext());
        }
        transaction.commit();
    }
}
Use of org.exist.dom.persistent.DocumentSet in the eXist-db project: class NativeStructuralIndexWorkerTest, method getDocIdRanges_multipleContiguous.
/**
 * Verifies that getDocIdRanges splits an id sequence with repeats and gaps into
 * one Range per contiguous run, preserving the input order.
 */
@Test
public void getDocIdRanges_multipleContiguous() {
    final NativeStructuralIndexWorker indexWorker = new NativeStructuralIndexWorker(null);
    final DocumentSet docs = documentIdSet(Arrays.asList(1, 2, 3, 4, 5, 6, 88, 89, 3, 4, 5, 6, 77, 10, 11, 12));
    final List<NativeStructuralIndexWorker.Range> ranges = indexWorker.getDocIdRanges(docs);
    // Each {start, end} pair is one contiguous run of document ids
    final int[][] expectedRanges = { {1, 6}, {88, 89}, {3, 6}, {77, 77}, {10, 12} };
    assertEquals(expectedRanges.length, ranges.size());
    for (int i = 0; i < expectedRanges.length; i++) {
        assertEquals(expectedRanges[i][0], ranges.get(i).start);
        assertEquals(expectedRanges[i][1], ranges.get(i).end);
    }
}
Aggregations