Use of org.exist.storage.dom.INodeIterator in the eXist-db project (exist):
class NativeSerializer, method serializeToReceiver(NodeProxy, boolean, boolean).
/**
 * Serializes the node addressed by the given proxy to the configured receiver.
 * A proxy that addresses a document (or the document node itself) is delegated
 * to the document-level serializer instead.
 *
 * @param p                the proxy of the stored node to serialize
 * @param generateDocEvent whether to emit startDocument/endDocument SAX events
 * @param checkAttributes  whether attribute nodes are validated during traversal
 * @throws SAXException if the receiver reports an error
 */
protected void serializeToReceiver(NodeProxy p, boolean generateDocEvent, boolean checkAttributes) throws SAXException {
    final boolean wholeDocument = Type.subTypeOf(p.getType(), Type.DOCUMENT)
            || p.getNodeId() == NodeId.DOCUMENT_NODE;
    if (wholeDocument) {
        // Delegate document-level serialization to the DocumentImpl overload.
        serializeToReceiver(p.getOwnerDocument(), generateDocEvent);
        return;
    }
    setDocument(p.getOwnerDocument());
    if (generateDocEvent && !documentStarted) {
        receiver.startDocument();
        documentStarted = true;
    }
    // The iterator is positioned on the proxy's node; close errors are non-fatal.
    try (final INodeIterator nodeIterator = broker.getNodeIterator(p)) {
        serializeToReceiver(null, nodeIterator, p.getOwnerDocument(), checkAttributes, p.getMatches(), new TreeSet<>());
    } catch (final IOException ioe) {
        LOG.warn("Unable to close node iterator", ioe);
    }
    if (generateDocEvent) {
        receiver.endDocument();
    }
}
Use of org.exist.storage.dom.INodeIterator in the eXist-db project (exist):
class NativeSerializer, method serializeToReceiver(DocumentImpl, boolean).
/**
 * Serializes a whole stored document to the configured receiver, optionally
 * emitting startDocument/endDocument events and the DOCTYPE declaration
 * (the latter only when the OUTPUT_DOCTYPE output property is set to "yes").
 *
 * @param doc              the stored document to serialize
 * @param generateDocEvent whether to emit startDocument/endDocument SAX events
 * @throws SAXException if the receiver reports an error
 */
protected void serializeToReceiver(DocumentImpl doc, boolean generateDocEvent) throws SAXException {
    final long startTime = System.currentTimeMillis();
    setDocument(doc);
    final NodeList topLevelNodes = doc.getChildNodes();
    if (generateDocEvent && !documentStarted) {
        receiver.startDocument();
        documentStarted = true;
    }
    // Emit the DOCTYPE only when present and explicitly requested.
    if (doc.getDoctype() != null
            && "yes".equals(getProperty(EXistOutputKeys.OUTPUT_DOCTYPE, "no"))) {
        final DocumentTypeImpl docType = (DocumentTypeImpl) doc.getDoctype();
        serializeToReceiver(docType, null, docType.getOwnerDocument(), true, null, new TreeSet<>());
    }
    // Serialize each top-level child of the document in order.
    for (int i = 0; i < topLevelNodes.getLength(); i++) {
        final IStoredNode<?> child = (IStoredNode<?>) topLevelNodes.item(i);
        try (final INodeIterator nodeIterator = broker.getNodeIterator(child)) {
            nodeIterator.next(); // advance onto the child node itself
            final NodeProxy proxy = new NodeProxy(child);
            serializeToReceiver(child, nodeIterator, (DocumentImpl) child.getOwnerDocument(), true, proxy.getMatches(), new TreeSet<>());
        } catch (final IOException ioe) {
            LOG.warn("Unable to close node iterator", ioe);
        }
    }
    if (generateDocEvent) {
        receiver.endDocument();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("serializing document {} ({}) to SAX took {} msec", doc.getDocId(), doc.getURI(), System.currentTimeMillis() - startTime);
    }
}
Use of org.exist.storage.dom.INodeIterator in the eXist-db project (exist):
class NativeBroker, method checkXMLResourceTree.
/**
 * Consistency check of the DOM tree of a document; useful after XUpdates.
 * Called by {@link #checkXMLResourceConsistency(DocumentImpl)}.
 * Runs only when the {@code PROPERTY_XUPDATE_CONSISTENCY_CHECKS} configuration
 * property is set to {@code true}; otherwise this method is a no-op.
 *
 * @param doc the document whose stored node tree is verified
 * @throws RuntimeException if the node tree structure is inconsistent
 */
@Override
public void checkXMLResourceTree(final DocumentImpl doc) {
    LOG.debug("Checking DOM tree for document {}", doc.getFileURI());
    boolean xupdateConsistencyChecks = false;
    final Object property = pool.getConfiguration().getProperty(PROPERTY_XUPDATE_CONSISTENCY_CHECKS);
    if (property != null) {
        xupdateConsistencyChecks = (Boolean) property;
    }
    if (xupdateConsistencyChecks) {
        // Dump page usage for the document under a btree read lock.
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) {
            @Override
            public Object start() throws ReadOnlyException {
                LOG.debug("Pages used: {}", domDb.debugPages(doc, false));
                return null;
            }
        }.run();
        final NodeList nodes = doc.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            // Parameterized type, consistent with the other tree-walking methods.
            final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
            try (final INodeIterator iterator = getNodeIterator(node)) {
                iterator.next();
                final StringBuilder buf = new StringBuilder();
                // Pass buf to the following method to get a dump of all node ids in the document
                if (!checkNodeTree(iterator, node, buf)) {
                    LOG.debug("node tree: {}", buf);
                    throw new RuntimeException("Error in document tree structure");
                }
            } catch (final IOException e) {
                // Include context; a bare LOG.error(e) loses the failing operation.
                LOG.error("Error while checking document tree structure", e);
            }
        }
        final NodeRef ref = new NodeRef(doc.getDocId());
        final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
        // Scan all index keys for the document under a btree read lock.
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeReadLock(domDb.getLockName())) {
            @Override
            public Object start() {
                try {
                    domDb.findKeys(idx);
                } catch (final BTreeException | IOException e) {
                    // Previous message said "removing doc", but this transaction only scans keys.
                    LOG.error("start() - error while scanning index keys of doc", e);
                }
                return null;
            }
        }.run();
    }
}
Use of org.exist.storage.dom.INodeIterator in the eXist-db project (exist):
class NativeBroker, method copyXMLResource.
/**
 * Copies the node tree of {@code oldDoc} into {@code newDoc}, routing the
 * copied nodes through the index controller's stream listener so that indexes
 * are built for the new document.
 *
 * @param transaction the current transaction
 * @param oldDoc      the source document (read-locked)
 * @param newDoc      the target document (write-locked)
 * @throws IOException if a node iterator cannot be opened or closed
 */
private void copyXMLResource(final Txn transaction, @EnsureLocked(mode = LockMode.READ_LOCK) final DocumentImpl oldDoc, @EnsureLocked(mode = LockMode.WRITE_LOCK) final DocumentImpl newDoc) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Copying document {} to {}", oldDoc.getFileURI(), newDoc.getURI());
    }
    final long startTime = System.currentTimeMillis();
    final StreamListener indexListener = getIndexController().getStreamListener(newDoc, ReindexMode.STORE);
    final NodeList children = oldDoc.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        final IStoredNode<?> child = (IStoredNode<?>) children.item(i);
        try (final INodeIterator iterator = getNodeIterator(child)) {
            iterator.next(); // advance onto the child node itself
            copyNodes(transaction, iterator, child, new NodePath2(), newDoc, false, indexListener);
        }
    }
    flush();
    closeDocument();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Copy took {} ms.", (System.currentTimeMillis() - startTime));
    }
}
Use of org.exist.storage.dom.INodeIterator in the eXist-db project (exist):
class NativeBroker, method defragXMLResource.
/**
 * Defragments a document by rebuilding its DOM storage: the structure and DOM
 * indexes are dropped, the nodes are copied into a fresh temporary document
 * (reindexing as they go), the old node pages are removed, and the compacted
 * node tree is adopted back into {@code doc}.
 *
 * NOTE(review): the statement order below is load-bearing (drop indexes →
 * copy into temp → flush → remove old pages → adopt children) — do not reorder.
 *
 * @param transaction the current transaction
 * @param doc         the document to defragment
 */
@Override
public void defragXMLResource(final Txn transaction, final DocumentImpl doc) {
    // TODO : use dedicated function in XmldbURI
    if (LOG.isDebugEnabled()) {
        LOG.debug("============> Defragmenting document {}", doc.getURI());
    }
    final long start = System.currentTimeMillis();
    try {
        final long firstChild = doc.getFirstChildAddress();
        // dropping old structure index
        dropIndex(transaction, doc);
        // dropping dom index
        final NodeRef ref = new NodeRef(doc.getDocId());
        final IndexQuery idx = new IndexQuery(IndexQuery.TRUNC_RIGHT, ref);
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) {
            @Override
            public Object start() {
                try {
                    domDb.remove(transaction, idx, null);
                    domDb.flush();
                } catch (final IOException | DBException e) {
                    LOG.error("start() - error while removing doc", e);
                } catch (final TerminatedException e) {
                    LOG.error("method terminated", e);
                }
                return null;
            }
        }.run();
        // create a copy of the old doc to copy the nodes into it
        final DocumentImpl tempDoc = new DocumentImpl(pool, doc.getCollection(), doc.getDocId(), doc.getFileURI());
        tempDoc.copyOf(this, doc, doc);
        final StreamListener listener = getIndexController().getStreamListener(doc, ReindexMode.STORE);
        // copy the nodes
        final NodeList nodes = doc.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            final IStoredNode<?> node = (IStoredNode<?>) nodes.item(i);
            try (final INodeIterator iterator = getNodeIterator(node)) {
                iterator.next(); // advance onto the child node itself
                copyNodes(transaction, iterator, node, new NodePath2(), tempDoc, true, listener);
            }
        }
        flush();
        // remove the old nodes
        new DOMTransaction(this, domDb, () -> lockManager.acquireBtreeWriteLock(domDb.getLockName())) {
            @Override
            public Object start() {
                domDb.removeAll(transaction, firstChild);
                try {
                    domDb.flush();
                } catch (final DBException e) {
                    LOG.error("start() - error while removing doc", e);
                }
                return null;
            }
        }.run();
        // adopt the compacted node tree and reset fragmentation counters
        doc.copyChildren(tempDoc);
        doc.setSplitCount(0);
        doc.setPageCount(tempDoc.getPageCount());
        storeXMLResource(transaction, doc);
        closeDocument();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Defragmentation took {} ms.", (System.currentTimeMillis() - start));
        }
    } catch (final PermissionDeniedException | IOException e) {
        LOG.error(e);
    }
}
Aggregations