Example 1 with StreamListener

Use of org.exist.indexing.StreamListener in project exist by eXist-db.

From class ElementImpl, method appendChildren().

@Override
public void appendChildren(final Txn transaction, NodeList nodes, final int child) throws DOMException {
    // attributes are handled differently. Call checkForAttributes to extract them.
    nodes = checkForAttributes(transaction, nodes);
    if (nodes == null || nodes.getLength() == 0) {
        return;
    }
    try (final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) {
        final NodePath path = getPath();
        StreamListener listener = null;
        final IndexController indexes = broker.getIndexController();
        // Provide the document context that getReindexRoot() needs below
        indexes.setDocument(ownerDocument);
        final IStoredNode reindexRoot = indexes.getReindexRoot(this, path, true, true);
        indexes.setMode(ReindexMode.STORE);
        // stream the new nodes directly only when no reindex root was found; otherwise the reindex below covers them
        if (reindexRoot == null) {
            listener = indexes.getStreamListener();
        }
        if (children == 0) {
            // no children: append a new child
            appendChildren(transaction, nodeId.newChild(), null, new NodeImplRef(this), path, nodes, listener);
        } else {
            if (child == 1) {
                final Node firstChild = getFirstChild();
                insertBefore(transaction, nodes, firstChild);
            } else {
                if (child > 1 && child <= children) {
                    final NodeList cl = getAttrsAndChildNodes();
                    final IStoredNode<?> last = (IStoredNode<?>) cl.item(child - 2);
                    insertAfter(transaction, nodes, last);
                } else {
                    final IStoredNode<?> last = (IStoredNode<?>) getLastChild(true);
                    appendChildren(transaction, last.getNodeId().nextSibling(), null, new NodeImplRef(getLastNode(last)), path, nodes, listener);
                }
            }
        }
        broker.updateNode(transaction, this, false);
        indexes.reindex(transaction, reindexRoot, ReindexMode.STORE);
        broker.flush();
    } catch (final EXistException e) {
        LOG.warn("Exception while appending child node: {}", e.getMessage(), e);
    }
}
Also used: IndexController (org.exist.indexing.IndexController), EXistException (org.exist.EXistException), StreamListener (org.exist.indexing.StreamListener)
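The example above follows a pattern that recurs throughout these usages: ask the IndexController for a reindex root, stream the new nodes through a StreamListener only when no such root exists, and otherwise rebuild the index from that root after the update. Below is a minimal sketch of just that pattern, reusing only the calls shown in the example; the broker, transaction, path and node variables are assumed to be in scope as above.

// Sketch only: the reindex-root pattern distilled from appendChildren() above.
final IndexController indexes = broker.getIndexController();
indexes.setDocument(ownerDocument);                        // document context for getReindexRoot()
final IStoredNode reindexRoot = indexes.getReindexRoot(this, path, true, true);
indexes.setMode(ReindexMode.STORE);
StreamListener listener = null;
if (reindexRoot == null) {
    // no enclosing index root: stream the appended nodes straight to the indexes
    listener = indexes.getStreamListener();
}
// ... append the nodes, passing `listener` to the low-level append methods ...
// called unconditionally in the example, so a null root is presumably tolerated
indexes.reindex(transaction, reindexRoot, ReindexMode.STORE);
broker.flush();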

Example 2 with StreamListener

Use of org.exist.indexing.StreamListener in project exist by eXist-db.

From class ElementImpl, method replaceChild().

/**
 * Replaces the oldNode with the newChild
 *
 * @param transaction the transaction
 * @param newChild to replace oldChild
 * @param oldChild to be replace by newChild
 * @return The new node (this differs from the {@link org.w3c.dom.Node#replaceChild(Node, Node)} specification)
 * @throws DOMException in case of a DOM error
 * @see org.w3c.dom.Node#replaceChild(org.w3c.dom.Node, org.w3c.dom.Node)
 */
@Override
public Node replaceChild(final Txn transaction, final Node newChild, final Node oldChild) throws DOMException {
    if (!(oldChild instanceof IStoredNode)) {
        throw new DOMException(DOMException.WRONG_DOCUMENT_ERR, "Wrong node type");
    }
    final IStoredNode<?> oldNode = (IStoredNode<?>) oldChild;
    if (!oldNode.getNodeId().getParentId().equals(nodeId)) {
        throw new DOMException(DOMException.NOT_FOUND_ERR, "Node is not a child of this element");
    }
    final NodePath thisPath = getPath();
    IStoredNode<?> previous = (IStoredNode<?>) oldNode.getPreviousSibling();
    if (previous == null) {
        previous = this;
    } else {
        previous = getLastNode(previous);
    }
    final NodePath oldPath = oldNode.getPath();
    StreamListener listener = null;
    Node newNode = null;
    try (final DBBroker broker = ownerDocument.getBrokerPool().getBroker()) {
        final IndexController indexes = broker.getIndexController();
        // Provide the document context that getReindexRoot() needs below
        indexes.setDocument(ownerDocument);
        final IStoredNode reindexRoot = broker.getIndexController().getReindexRoot(oldNode, oldPath, false);
        indexes.setMode(ReindexMode.REMOVE_SOME_NODES);
        if (reindexRoot == null) {
            listener = indexes.getStreamListener();
        } else {
            indexes.reindex(transaction, reindexRoot, ReindexMode.REMOVE_SOME_NODES);
        }
        broker.removeAllNodes(transaction, oldNode, oldPath, listener);
        broker.endRemove(transaction);
        broker.flush();
        indexes.setMode(ReindexMode.STORE);
        listener = indexes.getStreamListener();
        newNode = appendChild(transaction, oldNode.getNodeId(), new NodeImplRef(previous), thisPath, newChild, listener);
        // Reindex if required
        broker.storeXMLResource(transaction, getOwnerDocument());
        broker.updateNode(transaction, this, false);
        indexes.reindex(transaction, reindexRoot, ReindexMode.STORE);
        broker.flush();
    } catch (final EXistException e) {
        LOG.warn("Exception while inserting node: {}", e.getMessage(), e);
    }
    // returning the newNode is more sensible than returning the oldNode
    return newNode;
}
Also used: IndexController (org.exist.indexing.IndexController), EXistException (org.exist.EXistException), StreamListener (org.exist.indexing.StreamListener)
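replaceChild() applies the same reindex-root test, but drives the listener through two phases: REMOVE_SOME_NODES while the old node is dropped, then STORE while the replacement is appended. A condensed sketch of that sequence, assuming the same API and in-scope variables (indexes, reindexRoot, oldNode, oldPath) as in the example above:

// Sketch only: the two index phases of replaceChild().
indexes.setMode(ReindexMode.REMOVE_SOME_NODES);
StreamListener listener = null;
if (reindexRoot == null) {
    listener = indexes.getStreamListener();                // remove index entries node by node
} else {
    indexes.reindex(transaction, reindexRoot, ReindexMode.REMOVE_SOME_NODES);
}
broker.removeAllNodes(transaction, oldNode, oldPath, listener);
broker.endRemove(transaction);
broker.flush();
indexes.setMode(ReindexMode.STORE);
listener = indexes.getStreamListener();                    // re-acquire after the mode switch
// ... append the replacement node, passing `listener` ...
indexes.reindex(transaction, reindexRoot, ReindexMode.STORE);
broker.flush();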

Example 3 with StreamListener

Use of org.exist.indexing.StreamListener in project exist by eXist-db.

From class NativeBroker, method reindexXMLResource().

/**
 * Reindexes the nodes in the document: every top-level node of the document is
 * traversed and its descendants are streamed through the index controller's
 * StreamListener in STORE mode.
 */
@Override
public void reindexXMLResource(final Txn transaction, final DocumentImpl doc, final IndexMode mode) {
    final StreamListener listener = getIndexController().getStreamListener(doc, ReindexMode.STORE);
    getIndexController().startIndexDocument(transaction, listener);
    try {
        final NodeList nodes = doc.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
            try (final INodeIterator iterator = getNodeIterator(node)) {
                iterator.next();
                scanNodes(transaction, iterator, node, new NodePath2(), mode, listener);
            } catch (final IOException ioe) {
                LOG.error("Unable to close node iterator", ioe);
            }
        }
    } finally {
        getIndexController().endIndexDocument(transaction, listener);
    }
    flush();
}
Also used: INodeIterator (org.exist.storage.dom.INodeIterator), NodeList (org.w3c.dom.NodeList), StreamListener (org.exist.indexing.StreamListener)
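Unlike the element-level examples, the listener here is obtained for a whole document and a fixed mode, and the traversal is bracketed by startIndexDocument() and endIndexDocument(). A stripped-down sketch of that bracketing, using only the calls from the example (the node traversal itself is elided):

// Sketch only: document-level bracketing from reindexXMLResource() above.
final StreamListener listener = getIndexController().getStreamListener(doc, ReindexMode.STORE);
getIndexController().startIndexDocument(transaction, listener);
try {
    // ... iterate the document's nodes and stream each subtree through `listener`
    //     (scanNodes(...) in the example) ...
} finally {
    // always close the document-level indexing, even if the traversal fails
    getIndexController().endIndexDocument(transaction, listener);
}
flush();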

Example 4 with StreamListener

Use of org.exist.indexing.StreamListener in project exist by eXist-db.

From class MutableCollection, method validateXMLResourceInternal().

/**
 * Validates an XML document and prepares it for further storage. Launches the prepare and postValidate triggers.
 * Since the process depends on the collection configuration, the collection acquires a write lock for the
 * duration of the process.
 *
 * @param transaction The database transaction
 * @param broker      The database broker
 * @param name        the name (without path) of the document
 * @param config      the collection configuration that applies to this collection
 * @param validator   a function which validates the document or throws an exception
 *
 * @return An {@link IndexInfo} with a write lock on the document.
 */
private IndexInfo validateXMLResourceInternal(final Txn transaction, final DBBroker broker, final XmldbURI name, final CollectionConfiguration config, final Consumer2E<IndexInfo, SAXException, EXistException> validator) throws EXistException, PermissionDeniedException, TriggerException, SAXException, LockException, IOException {
    // Make the necessary operations if we process a collection configuration document
    checkConfigurationDocument(transaction, broker, name);
    final Database db = broker.getBrokerPool();
    if (db.isReadOnly()) {
        throw new IOException("Database is read-only");
    }
    ManagedDocumentLock documentWriteLock = null;
    DocumentImpl oldDoc = null;
    db.getProcessMonitor().startJob(ProcessMonitor.ACTION_VALIDATE_DOC, name);
    try {
        try (final ManagedCollectionLock collectionLock = lockManager.acquireCollectionWriteLock(path)) {
            // acquire the WRITE_LOCK on the Document, this lock is released in storeXMLInternal via IndexInfo
            documentWriteLock = lockManager.acquireDocumentWriteLock(getURI().append(name.lastSegment()));
            oldDoc = documents.get(name.lastSegmentString());
            checkPermissionsForAddDocument(broker, oldDoc);
            // NOTE: the new `document` object actually gets discarded in favour of the `oldDoc` below if there is an oldDoc and it is XML (so we can use -1 as the docId because it will never be used)
            final int docId = (oldDoc != null && oldDoc.getResourceType() == DocumentImpl.XML_FILE) ? -1 : broker.getNextResourceId(transaction);
            DocumentImpl document = new DocumentImpl((BrokerPool) db, this, docId, name);
            checkCollectionConflict(name);
            manageDocumentInformation(oldDoc, document);
            final Indexer indexer = new Indexer(broker, transaction);
            final IndexInfo info = new IndexInfo(indexer, config, documentWriteLock);
            info.setCreating(oldDoc == null);
            info.setOldDocPermissions(oldDoc != null ? oldDoc.getPermissions() : null);
            indexer.setDocument(document, config);
            indexer.setValidating(true);
            final DocumentTriggers trigger = new DocumentTriggers(broker, transaction, indexer, this, broker.isTriggersEnabled() ? config : null);
            trigger.setValidating(true);
            info.setTriggers(trigger);
            if (oldDoc == null) {
                trigger.beforeCreateDocument(broker, transaction, getURI().append(name));
            } else {
                trigger.beforeUpdateDocument(broker, transaction, oldDoc);
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Scanning document {}", getURI().append(name));
            }
            validator.accept(info);
            // new document is valid: remove old document
            if (oldDoc != null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("removing old document {}", oldDoc.getFileURI());
                }
                updateModificationTime(document);
                /**
                 * Matching {@link StreamListener#endReplaceDocument(Txn)} call is in
                 * {@link #storeXMLInternal(Txn, DBBroker, IndexInfo, Consumer2E)}
                 */
                final StreamListener listener = broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT);
                listener.startReplaceDocument(transaction);
                if (oldDoc.getResourceType() == DocumentImpl.BINARY_FILE) {
                    // TODO: use a more elaborate method? No triggers...
                    broker.removeBinaryResource(transaction, (BinaryDocument) oldDoc);
                    documents.remove(oldDoc.getFileURI().lastSegmentString());
                    addDocument(transaction, broker, document);
                } else {
                    // TODO: use a more elaborate method? No triggers...
                    broker.removeXMLResource(transaction, oldDoc, false);
                    oldDoc.copyOf(broker, document, oldDoc);
                    indexer.setDocumentObject(oldDoc);
                    // old has become new at this point
                    document = oldDoc;
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("removed old document {}", oldDoc.getFileURI());
                }
            } else {
                addDocument(transaction, broker, document);
            }
            trigger.setValidating(false);
            return info;
        }
    } catch (final EXistException | PermissionDeniedException | SAXException | LockException | IOException e) {
        // if there is an exception and we hold the document WRITE_LOCK we must release it
        if (documentWriteLock != null) {
            documentWriteLock.close();
        }
        throw e;
    } finally {
        db.getProcessMonitor().endJob();
    }
}
Also used: EXistException (org.exist.EXistException), SAXException (org.xml.sax.SAXException), Indexer (org.exist.Indexer), LockException (org.exist.util.LockException), Database (org.exist.Database), PermissionDeniedException (org.exist.security.PermissionDeniedException), StreamListener (org.exist.indexing.StreamListener)
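The StreamListener usage in this method is easy to miss among the locking and trigger code: when an existing document is being replaced, a listener in REPLACE_DOCUMENT mode is opened before the old document is removed, and the matching endReplaceDocument() call only happens later in storeXMLInternal() (Example 5). A sketch of just that step, with the same variables assumed in scope as above:

// Sketch only: opening the replace-document bracket in validateXMLResourceInternal().
final StreamListener listener =
        broker.getIndexController().getStreamListener(document, StreamListener.ReindexMode.REPLACE_DOCUMENT);
listener.startReplaceDocument(transaction);
// ... remove the old document (removeBinaryResource / removeXMLResource) and let the
//     new document take its place; the bracket stays open until storeXMLInternal()
//     calls endReplaceDocument(transaction) ...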

Example 5 with StreamListener

Use of org.exist.indexing.StreamListener in project exist by eXist-db.

From class MutableCollection, method storeXMLInternal().

/**
 * Stores an XML document in the database. {@link #validateXMLResourceInternal(Txn, DBBroker, XmldbURI,
 * CollectionConfiguration, Consumer2E)} should have been called previously in order to acquire a write lock
 * for the document. Launches the finish trigger.
 *
 * @param transaction The database transaction
 * @param broker      The database broker
 * @param info        Tracks information between validate and store phases
 * @param parserFn    A function which parses the XML document
 */
private void storeXMLInternal(final Txn transaction, final DBBroker broker, final IndexInfo info, final Consumer2E<IndexInfo, EXistException, SAXException> parserFn) throws EXistException, SAXException, PermissionDeniedException {
    final DocumentImpl document = info.getIndexer().getDocument();
    final Database db = broker.getBrokerPool();
    try {
        if (LOG.isDebugEnabled()) {
            LOG.debug("storing document {} ...", document.getDocId());
        }
        // Sanity check
        if (!lockManager.isDocumentLockedForWrite(document.getURI())) {
            LOG.warn("document is not locked for write !");
        }
        db.getProcessMonitor().startJob(ProcessMonitor.ACTION_STORE_DOC, document.getFileURI());
        parserFn.accept(info);
        broker.storeXMLResource(transaction, document);
        broker.flush();
        broker.closeDocument();
        // broker.checkTree(document);
        LOG.debug("document stored.");
    } finally {
        // This lock has been acquired in validateXMLResourceInternal()
        info.getDocumentLock().close();
        broker.getBrokerPool().getProcessMonitor().endJob();
    }
    if (info.isCreating()) {
        info.getTriggers().afterCreateDocument(broker, transaction, document);
    } else {
        final StreamListener listener = broker.getIndexController().getStreamListener();
        listener.endReplaceDocument(transaction);
        info.getTriggers().afterUpdateDocument(broker, transaction, document);
    }
    db.getNotificationService().notifyUpdate(document, (info.isCreating() ? UpdateListener.ADD : UpdateListener.UPDATE));
    // Is it a collection configuration file ?
    final XmldbURI docName = document.getFileURI();
    // TODO: *resolve* URI against CollectionConfigurationManager.CONFIG_COLLECTION_URI
    if (getURI().startsWith(XmldbURI.CONFIG_COLLECTION_URI) && docName.endsWith(CollectionConfiguration.COLLECTION_CONFIG_SUFFIX_URI)) {
        broker.sync(Sync.MAJOR);
        final CollectionConfigurationManager manager = broker.getBrokerPool().getConfigurationManager();
        if (manager != null) {
            try {
                manager.invalidate(getURI(), broker.getBrokerPool());
                manager.loadConfiguration(broker, this);
            } catch (final PermissionDeniedException | LockException pde) {
                throw new EXistException(pde.getMessage(), pde);
            } catch (final CollectionConfigurationException e) {
                // DIZ: should this exception really be thrown? bugid=1807744
                throw new EXistException("Error while reading new collection configuration: " + e.getMessage(), e);
            }
        }
    }
}
Also used: LockException (org.exist.util.LockException), Database (org.exist.Database), PermissionDeniedException (org.exist.security.PermissionDeniedException), EXistException (org.exist.EXistException), XmldbURI (org.exist.xmldb.XmldbURI), StreamListener (org.exist.indexing.StreamListener)
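This method closes the bracket opened in Example 4: after the new content has been stored, the controller's current listener is fetched again and endReplaceDocument() is called, but only on the update path. A sketch of that branch, reusing the calls shown above; the assumption that the no-argument getStreamListener() returns the listener configured during validation follows from the example, not from independent documentation.

// Sketch only: closing the replace-document bracket in storeXMLInternal().
if (info.isCreating()) {
    info.getTriggers().afterCreateDocument(broker, transaction, document);
} else {
    // assumed: getStreamListener() returns the listener configured with
    // ReindexMode.REPLACE_DOCUMENT in validateXMLResourceInternal()
    final StreamListener listener = broker.getIndexController().getStreamListener();
    listener.endReplaceDocument(transaction);
    info.getTriggers().afterUpdateDocument(broker, transaction, document);
}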

Aggregations

StreamListener (org.exist.indexing.StreamListener): 16
EXistException (org.exist.EXistException): 9
IndexController (org.exist.indexing.IndexController): 9
NodeId (org.exist.numbering.NodeId): 4
INodeIterator (org.exist.storage.dom.INodeIterator): 4
NodeList (org.w3c.dom.NodeList): 4
PermissionDeniedException (org.exist.security.PermissionDeniedException): 3
Database (org.exist.Database): 2
LockException (org.exist.util.LockException): 2
XmldbURI (org.exist.xmldb.XmldbURI): 2
PooledObject (org.apache.commons.pool2.PooledObject): 1
DefaultPooledObject (org.apache.commons.pool2.impl.DefaultPooledObject): 1
Indexer (org.exist.Indexer): 1
AbstractStreamListener (org.exist.indexing.AbstractStreamListener): 1
DOMTransaction (org.exist.storage.dom.DOMTransaction): 1
TerminatedException (org.exist.xquery.TerminatedException): 1
SAXException (org.xml.sax.SAXException): 1