Use of org.exist.dom.persistent.DocumentImpl in project exist by eXist-db.
The class SystemImportHandler, method restoreDeletedEntry.
private void restoreDeletedEntry(final Attributes atts) {
    final String name = atts.getValue("name");
    final String type = atts.getValue("type");
    if ("collection".equals(type)) {
        try {
            final Collection col = broker.getCollection(currentCollection.getURI().append(name));
            if (col != null) {
                // delete
                try (final Txn transaction = beginTransaction()) {
                    broker.removeCollection(transaction, col);
                    transaction.commit();
                } catch (final Exception e) {
                    listener.warn("Failed to remove deleted collection: " + name + ": " + e.getMessage());
                }
            }
        } catch (final Exception e) {
            listener.warn("Failed to remove deleted collection: " + name + ": " + e.getMessage());
        }
    } else if ("resource".equals(type)) {
        try {
            final XmldbURI uri = XmldbURI.create(name);
            final DocumentImpl doc = currentCollection.getDocument(broker, uri);
            if (doc != null) {
                try (final Txn transaction = beginTransaction()) {
                    if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
                        currentCollection.removeBinaryResource(transaction, broker, uri);
                    } else {
                        currentCollection.removeXMLResource(transaction, broker, uri);
                    }
                    transaction.commit();
                } catch (final Exception e) {
                    listener.warn("Failed to remove deleted resource: " + name + ": " + e.getMessage());
                }
            }
        } catch (final Exception e) {
            listener.warn("Failed to remove deleted resource: " + name + ": " + e.getMessage());
        }
    }
}
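The method follows eXist's usual pattern of wrapping each removal in its own Txn: the transaction is committed on success, and if the body throws before commit, the try-with-resources block aborts it on close. A minimal sketch of the same pattern in isolation might look as follows (the helper name removeResource, and the assumption that broker, transactionManager and targetCollection have already been obtained from a running BrokerPool, are illustrative and not part of the original code):

private void removeResource(final DBBroker broker, final TransactionManager transactionManager,
        final Collection targetCollection, final XmldbURI resourceUri) throws Exception {
    // one transaction per removal, as in restoreDeletedEntry above
    try (final Txn transaction = transactionManager.beginTransaction()) {
        final DocumentImpl doc = targetCollection.getDocument(broker, resourceUri);
        if (doc == null) {
            return; // nothing to remove
        }
        if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
            targetCollection.removeBinaryResource(transaction, broker, resourceUri);
        } else {
            targetCollection.removeXMLResource(transaction, broker, resourceUri);
        }
        transaction.commit(); // without this, closing the Txn aborts the changes
    }
}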
Use of org.exist.dom.persistent.DocumentImpl in project exist by eXist-db.
The class XIncludeFilter, method processXInclude.
/**
 * @param href The resource to be xincluded
 * @param xpointer The xpointer
 * @return Optionally a ResourceError if it was not possible to retrieve the resource
 *         to be xincluded
 * @throws SAXException If a SAX processing error occurs
 */
protected Optional<ResourceError> processXInclude(final String href, String xpointer) throws SAXException {
    if (href == null) {
        throw new SAXException("No href attribute found in XInclude include element");
    }
    // save some settings
    DocumentImpl prevDoc = document;
    boolean createContainerElements = serializer.createContainerElements;
    serializer.createContainerElements = false;
    // The following comments are the basis for possible external documents
    XmldbURI docUri = null;
    try {
        docUri = XmldbURI.xmldbUriFor(href);
        /*
        if(!stylesheetUri.toCollectionPathURI().equals(stylesheetUri)) {
            externalUri = stylesheetUri.getXmldbURI();
        }
        */
    } catch (final URISyntaxException e) {
        // could be an external URI!
    }
    // parse the href attribute
    LOG.debug("found href=\"{}\"", href);
    // String xpointer = null;
    // String docName = href;
    Map<String, String> params = null;
    DocumentImpl doc = null;
    org.exist.dom.memtree.DocumentImpl memtreeDoc = null;
    boolean xqueryDoc = false;
    if (docUri != null) {
        final String fragment = docUri.getFragment();
        if (!(fragment == null || fragment.length() == 0)) {
            throw new SAXException("Fragment identifiers must not be used in an xinclude href attribute. To specify an xpointer, use the xpointer attribute.");
        }
        // extract possible parameters in the URI
        params = null;
        final String paramStr = docUri.getQuery();
        if (paramStr != null) {
            params = processParameters(paramStr);
            // strip query part
            docUri = XmldbURI.create(docUri.getRawCollectionPath());
        }
        // Patch 1520454 start
        if (!docUri.isAbsolute() && document != null) {
            final String base = document.getCollection().getURI() + "/";
            final String child = "./" + docUri.toString();
            final URI baseUri = URI.create(base);
            final URI childUri = URI.create(child);
            final URI uri = baseUri.resolve(childUri);
            docUri = XmldbURI.create(uri);
        }
        // retrieve the document
        try {
            doc = serializer.broker.getResource(docUri, Permission.READ);
        } catch (final PermissionDeniedException e) {
            return Optional.of(new ResourceError("Permission denied to read XInclude'd resource", e));
        }
        /* Check if the document is a stored XQuery */
        if (doc != null && doc.getResourceType() == DocumentImpl.BINARY_FILE) {
            xqueryDoc = "application/xquery".equals(doc.getMimeType());
        }
    }
    // The document could not be found: check if it points to an external resource
    if (docUri == null || (doc == null && !docUri.isAbsolute())) {
        try {
            URI externalUri = new URI(href);
            final String scheme = externalUri.getScheme();
            // XQuery context.
            if (scheme == null && moduleLoadPath != null) {
                final String path = externalUri.getSchemeSpecificPart();
                Path f = Paths.get(path);
                if (!f.isAbsolute()) {
                    if (moduleLoadPath.startsWith(XmldbURI.XMLDB_URI_PREFIX)) {
                        final XmldbURI parentUri = XmldbURI.create(moduleLoadPath);
                        docUri = parentUri.append(path);
                        doc = (DocumentImpl) serializer.broker.getXMLResource(docUri);
                        if (doc != null && !doc.getPermissions().validate(serializer.broker.getCurrentSubject(), Permission.READ)) {
                            throw new PermissionDeniedException("Permission denied to read XInclude'd resource");
                        }
                    } else {
                        f = Paths.get(moduleLoadPath, path);
                        externalUri = f.toUri();
                    }
                }
            }
            if (doc == null) {
                final Either<ResourceError, org.exist.dom.memtree.DocumentImpl> external = parseExternal(externalUri);
                if (external.isLeft()) {
                    return Optional.of(external.left().get());
                } else {
                    memtreeDoc = external.right().get();
                }
            }
        } catch (final PermissionDeniedException e) {
            return Optional.of(new ResourceError("Permission denied on XInclude'd resource", e));
        } catch (final ParserConfigurationException | URISyntaxException e) {
            throw new SAXException("XInclude: failed to parse document at URI: " + href + ": " + e.getMessage(), e);
        }
    }
    /* if document has not been found and xpointer is
     * null, throw an exception. If xpointer != null
     * we retry below and interpret docName as
     * a collection.
     */
    if (doc == null && memtreeDoc == null && xpointer == null) {
        return Optional.of(new ResourceError("document " + docUri + " not found"));
    }
    if (xpointer == null && !xqueryDoc) {
        // no xpointer found - just serialize the doc
        if (memtreeDoc == null) {
            serializer.serializeToReceiver(doc, false);
        } else {
            serializer.serializeToReceiver(memtreeDoc, false);
        }
    } else {
        // process the xpointer or the stored XQuery
        Source source = null;
        final XQueryPool pool = serializer.broker.getBrokerPool().getXQueryPool();
        CompiledXQuery compiled = null;
        try {
            if (xpointer == null) {
                source = new DBSource(serializer.broker, (BinaryDocument) doc, true);
            } else {
                xpointer = checkNamespaces(xpointer);
                source = new StringSource(xpointer);
            }
            final XQuery xquery = serializer.broker.getBrokerPool().getXQueryService();
            XQueryContext context;
            compiled = pool.borrowCompiledXQuery(serializer.broker, source);
            if (compiled == null) {
                context = new XQueryContext(serializer.broker.getBrokerPool());
            } else {
                context = compiled.getContext();
                context.prepareForReuse();
            }
            context.declareNamespaces(namespaces);
            context.declareNamespace("xinclude", Namespaces.XINCLUDE_NS);
            // setup the http context if known
            if (serializer.httpContext != null) {
                context.setHttpContext(serializer.httpContext);
            }
            // TODO: change these to putting the XmldbURI in, but we need to warn users!
            if (document != null) {
                context.declareVariable("xinclude:current-doc", document.getFileURI().toString());
                context.declareVariable("xinclude:current-collection", document.getCollection().getURI().toString());
            }
            if (xpointer != null) {
                if (doc != null) {
                    context.setStaticallyKnownDocuments(new XmldbURI[] { doc.getURI() });
                } else if (docUri != null) {
                    context.setStaticallyKnownDocuments(new XmldbURI[] { docUri });
                }
            }
            // pass parameters as variables
            if (params != null) {
                for (final Map.Entry<String, String> entry : params.entrySet()) {
                    context.declareVariable(entry.getKey(), entry.getValue());
                }
            }
            if (compiled == null) {
                try {
                    compiled = xquery.compile(context, source, xpointer != null);
                } catch (final IOException e) {
                    throw new SAXException("I/O error while reading query for xinclude: " + e.getMessage(), e);
                }
            } else {
                compiled.getContext().updateContext(context);
                context.getWatchDog().reset();
            }
            LOG.info("xpointer query: {}", ExpressionDumper.dump((Expression) compiled));
            Sequence contextSeq = null;
            if (memtreeDoc != null) {
                contextSeq = memtreeDoc;
            }
            try {
                final Sequence seq = xquery.execute(serializer.broker, compiled, contextSeq);
                if (Type.subTypeOf(seq.getItemType(), Type.NODE)) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("xpointer found: {}", seq.getItemCount());
                    }
                    NodeValue node;
                    for (final SequenceIterator i = seq.iterate(); i.hasNext(); ) {
                        node = (NodeValue) i.nextItem();
                        serializer.serializeToReceiver(node, false);
                    }
                } else {
                    String val;
                    for (int i = 0; i < seq.getItemCount(); i++) {
                        val = seq.itemAt(i).getStringValue();
                        characters(val);
                    }
                }
            } finally {
                context.runCleanupTasks();
            }
        } catch (final XPathException | PermissionDeniedException e) {
            LOG.warn("xpointer error", e);
            throw new SAXException("Error while processing XInclude expression: " + e.getMessage(), e);
        } finally {
            if (compiled != null) {
                pool.returnCompiledXQuery(source, compiled);
            }
        }
    }
    // restore settings
    document = prevDoc;
    serializer.createContainerElements = createContainerElements;
    return Optional.empty();
}
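Most of the XQuery handling above is the standard borrow/compile/execute/return cycle against the shared XQueryPool. Stripped of the XInclude specifics, that cycle can be sketched roughly as follows (the helper name runQuery, and the way broker and queryText are obtained, are assumptions made for illustration, not part of the original code):

Sequence runQuery(final DBBroker broker, final String queryText) throws Exception {
    final BrokerPool brokerPool = broker.getBrokerPool();
    final XQueryPool pool = brokerPool.getXQueryPool();
    final XQuery xquery = brokerPool.getXQueryService();
    final Source source = new StringSource(queryText);
    CompiledXQuery compiled = pool.borrowCompiledXQuery(broker, source);
    final XQueryContext context;
    if (compiled == null) {
        context = new XQueryContext(brokerPool);  // nothing cached yet: build a fresh context
    } else {
        context = compiled.getContext();          // reuse the cached query's context
        context.prepareForReuse();
    }
    try {
        if (compiled == null) {
            compiled = xquery.compile(context, source, false);
        }
        return xquery.execute(broker, compiled, null);  // null = no context sequence
    } finally {
        context.runCleanupTasks();
        if (compiled != null) {
            pool.returnCompiledXQuery(source, compiled);  // hand the compiled query back to the pool
        }
    }
}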
Use of org.exist.dom.persistent.DocumentImpl in project exist by eXist-db.
The class NativeStructuralIndexWorker, method removeCollection.
@Override
public void removeCollection(Collection collection, DBBroker broker, boolean reindex) throws PermissionDeniedException {
    try {
        for (final Iterator<DocumentImpl> i = collection.iterator(broker); i.hasNext(); ) {
            final DocumentImpl doc = i.next();
            removeDocument(doc);
        }
    } catch (final LockException e) {
        LOG.error(e);
    }
}
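The same iteration pattern works for any per-document operation over a collection; collection.iterator(broker) involves locking, which is presumably why LockException is handled around the loop. A minimal sketch that only logs each document instead of dropping its index entries (purely illustrative, assuming collection and broker are already available):

try {
    for (final Iterator<DocumentImpl> i = collection.iterator(broker); i.hasNext(); ) {
        final DocumentImpl doc = i.next();
        LOG.debug("visiting {}", doc.getURI()); // doc.getURI() yields the document's XmldbURI
    }
} catch (final LockException e) {
    LOG.error(e);
}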
Use of org.exist.dom.persistent.DocumentImpl in project exist by eXist-db.
The class NativeStructuralIndexWorker, method findDescendantsByTagName.
public NodeSet findDescendantsByTagName(byte type, QName qname, int axis, DocumentSet docs, NodeSet contextSet, int contextId, Expression parent) {
    final NewArrayNodeSet result = new NewArrayNodeSet();
    final FindDescendantsCallback callback = new FindDescendantsCallback(type, axis, qname, contextId, result, parent);
    try (final ManagedLock<ReentrantLock> btreeLock = index.lockManager.acquireBtreeReadLock(index.btree.getLockName())) {
        for (final NodeProxy ancestor : contextSet) {
            final DocumentImpl doc = ancestor.getOwnerDocument();
            final NodeId ancestorId = ancestor.getNodeId();
            callback.setAncestor(doc, ancestor);
            final byte[] fromKey;
            final byte[] toKey;
            if (ancestorId == NodeId.DOCUMENT_NODE) {
                fromKey = computeKey(type, qname, doc.getDocId());
                toKey = computeKey(type, qname, doc.getDocId() + 1);
            } else {
                fromKey = computeKey(type, qname, doc.getDocId(), ancestorId);
                toKey = computeKey(type, qname, doc.getDocId(), ancestorId.nextSibling());
            }
            final IndexQuery query = new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey));
            try {
                index.btree.query(query, callback);
            } catch (final Exception e) {
                NativeStructuralIndex.LOG.error("Error while searching structural index: {}", e.getMessage(), e);
            }
        }
    } catch (final LockException e) {
        NativeStructuralIndex.LOG.warn("Lock problem while searching structural index: {}", e.getMessage(), e);
    }
    result.updateNoSort();
    return result;
}
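The interesting part is how the B-tree range is derived: with eXist's hierarchical node ids, the keys of an ancestor's descendants sort between the ancestor's own key and the key built from the ancestor's next sibling, so a single range scan per ancestor covers all of its descendants. Restated in isolation (same calls as above, shown only to highlight the range construction):

// for one ancestor in the context set: everything >= fromKey and < toKey is a descendant
final byte[] fromKey = computeKey(type, qname, doc.getDocId(), ancestorId);
final byte[] toKey = computeKey(type, qname, doc.getDocId(), ancestorId.nextSibling());
index.btree.query(new IndexQuery(IndexQuery.RANGE, new Value(fromKey), new Value(toKey)), callback);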
Use of org.exist.dom.persistent.DocumentImpl in project exist by eXist-db.
The class NativeStructuralIndexWorker, method getDocIdRanges.
/**
 * Scan the document set to find document id ranges to query
 *
 * @param docs the document set
 * @return List of contiguous document id ranges
 */
List<Range> getDocIdRanges(final DocumentSet docs) {
    final List<Range> ranges = new ArrayList<>();
    Range next = null;
    for (final Iterator<DocumentImpl> i = docs.getDocumentIterator(); i.hasNext(); ) {
        final DocumentImpl doc = i.next();
        if (next == null) {
            next = new Range(doc.getDocId());
        } else if (next.end + 1 == doc.getDocId()) {
            next.end++;
        } else {
            ranges.add(next);
            next = new Range(doc.getDocId());
        }
    }
    if (next != null) {
        ranges.add(next);
    }
    return ranges;
}
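The loop merges consecutive document ids delivered by the iterator into contiguous Range objects. A stand-alone worked example of the same merging logic on plain ints might look as follows (the DocIdRangeDemo class, the mergeIds helper, and the local Range stand-in are hypothetical names for illustration; the worker's own Range class is assumed to expose start and end the same way):

import java.util.ArrayList;
import java.util.List;

public class DocIdRangeDemo {

    static final class Range {
        int start;
        int end;
        Range(final int start) { this.start = start; this.end = start; }
    }

    static List<Range> mergeIds(final int[] ascendingDocIds) {
        final List<Range> ranges = new ArrayList<>();
        Range next = null;
        for (final int id : ascendingDocIds) {
            if (next == null) {
                next = new Range(id);       // first id opens the first range
            } else if (next.end + 1 == id) {
                next.end++;                 // contiguous id: extend the current range
            } else {
                ranges.add(next);           // gap: close the current range, open a new one
                next = new Range(id);
            }
        }
        if (next != null) {
            ranges.add(next);
        }
        return ranges;
    }

    public static void main(final String[] args) {
        // ids 5,6,7 are contiguous, 12,13 are contiguous, 20 stands alone
        for (final Range r : mergeIds(new int[] {5, 6, 7, 12, 13, 20})) {
            System.out.println(r.start + ".." + r.end);  // prints 5..7, 12..13, 20..20
        }
    }
}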